| lang (stringclasses, 1 value) | license (stringclasses, 13 values) | stderr (stringlengths 0-350) | commit (stringlengths 40-40) | returncode (int64, 0-128) | repos (stringlengths 7-45.1k) | new_contents (stringlengths 0-1.87M) | new_file (stringlengths 6-292) | old_contents (stringlengths 0-1.87M) | message (stringlengths 6-9.26k) | old_file (stringlengths 6-292) | subject (stringlengths 0-4.45k) |
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
mit
|
8753f12131a9df9574ef22ab4e3a98306b263381
| 0
|
crttcr/Args
|
package com.xivvic.args.schema.item;
import java.util.Map;
import com.xivvic.args.error.ErrorCode;
import com.xivvic.args.error.SchemaException;
import com.xivvic.args.marshall.OptEvaluator;
import com.xivvic.args.marshall.OptEvaluatorBase;
import com.xivvic.args.schema.OptionType;
import com.xivvic.args.util.BooleanUtil;
import lombok.Getter;
import lombok.ToString;
@Getter
@ToString
public class Item<T>
{
public final static String NAME = "name";
public final static String TYPE = "type";
public final static String DESCRIPTION = "description";
public final static String DEFAULT = "dv";
public final static String REQUIRED = "required";
public final static String ENV_VAR = "ev";
private String name;
private OptionType type;
private OptEvaluator<T> eval;
private Boolean required;
private String description;
private String ev;
private Item()
{
}
public Boolean getRequired()
{
if (required == null)
{
return null;
}
return required;
}
public static <U> Builder<U> builder()
{
Builder<U> rv = new Builder<>();
return rv;
}
public static <U> Builder<U> builder(Map<String, String> args) throws SchemaException
{
Builder<U> rv = new Builder<>();
if (args == null)
{
return rv;
}
String name = args.get(NAME);
String type = args.get(TYPE);
String desc = args.get(DESCRIPTION);
String reqd = args.get(REQUIRED);
String ev = args.get(ENV_VAR);
String dv = args.get(DEFAULT);
if (name != null) { rv.name(name); }
if (type != null) { rv.type(type); }
if (desc != null) { rv.description(desc); }
if (reqd != null) { rv.required(reqd); }
if (ev != null) { rv.ev(ev); }
if (dv != null) { rv.dv(dv); } // Occurs after type() is set.
return rv;
}
public static class Builder<T>
{
// If you were to make this field final, you can't reuse the builder to create a different object.
//
// Reuse would permit you to potentially leak changes to the object if the caller holds on
// to your builder, so each builder can only build a single instance.
//
private final Item<T> instance = new Item<T>();
private String dv;
public Builder<T> name(String name)
{
this.instance.name = name;
return this;
}
public Builder<T> type(String type) throws SchemaException
{
if (type == null)
{
this.instance.type = null;
this.instance.eval = null;
return this;
}
try
{
OptionType ot = OptionType.valueOf(type);
this.instance.type = ot;
OptEvaluator<T> oe = OptEvaluatorBase.getEvaluatorForType(ot);
this.instance.eval = oe;
}
catch (Exception e)
{
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, instance.name, type);
}
return this;
}
public Builder<T> description(String description)
{
this.instance.description = description;
return this;
}
public Builder<T> ev(String ev)
{
this.instance.ev = ev;
return this;
}
public Builder<T> dv(String dv)
{
this.dv = dv;
return this;
}
public Builder<T> required(String required)
{
this.instance.required = BooleanUtil.parseBoolean(required);
return this;
}
@SuppressWarnings("unchecked")
public Item<T> build() throws SchemaException
{
assertValid();
if (dv != null)
{
instance.eval.setDefaultValue(dv);
}
Item<T> result = instance;
return result;
}
private void assertValid() throws SchemaException
{
if (instance.name == null)
{
String msg = String.format("Items require a valid name: [%s]", instance);
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, msg);
}
if (instance.type == null)
{
String msg = String.format("Items require a valid type: [%s]", instance);
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, msg);
}
if (instance.eval == null)
{
String msg = String.format("Items require a valid evaluator: [%s]", instance);
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, msg);
}
}
}
}
|
src/main/java/com/xivvic/args/schema/item/Item.java
|
package com.xivvic.args.schema.item;
import java.util.Map;
import com.xivvic.args.error.ErrorCode;
import com.xivvic.args.error.SchemaException;
import com.xivvic.args.marshall.OptEvaluator;
import com.xivvic.args.marshall.OptEvaluatorBase;
import com.xivvic.args.schema.OptionType;
import com.xivvic.args.util.BooleanUtil;
import lombok.Getter;
import lombok.ToString;
@Getter
@ToString
public class Item<T>
{
public final static String NAME = "name";
public final static String TYPE = "type";
public final static String DESCRIPTION = "description";
public final static String DEFAULT = "dv";
public final static String REQUIRED = "required";
public final static String ENV_VAR = "ev";
private String name;
private OptionType type;
private OptEvaluator<T> eval;
private Boolean required;
private String description;
private String ev;
private Item()
{
}
// private Item(String name, OptionType type, OptEvaluator<T> eval)
// {
// this(name, type, eval, Boolean.FALSE, null, null, null);
// }
//
// private Item(String name, OptionType type, OptEvaluator<T> eval, String description, String ev, String dv)
// {
// this(name, type, eval, Boolean.FALSE, description, ev, dv);
// }
//
// private Item(String name, OptionType type, OptEvaluator<T> eval, Boolean required, String description, String ev, String dv)
// {
// this.name = name;
// this.type = type;
// this.eval = eval;
// this.required = required;
// this.description = description;
// this.ev = ev;
// }
//
public Boolean getRequired()
{
if (required == null)
{
return null;
}
return required;
}
public static <U> Builder<U> builder()
{
Builder<U> rv = new Builder<>();
return rv;
}
public static <U> Builder<U> builder(Map<String, String> args) throws SchemaException
{
Builder<U> rv = new Builder<>();
if (args == null)
{
return rv;
}
String name = args.get(NAME);
String type = args.get(TYPE);
String desc = args.get(DESCRIPTION);
String reqd = args.get(REQUIRED);
String ev = args.get(ENV_VAR);
String dv = args.get(DEFAULT);
if (name != null) { rv.name(name); }
if (type != null) { rv.type(type); }
if (desc != null) { rv.description(desc); }
if (reqd != null) { rv.required(reqd); }
if (ev != null) { rv.ev(ev); }
if (dv != null) { rv.dv(dv); } // Occurs after type() is set.
return rv;
}
public static class Builder<T>
{
// If you were to make this field final, you can't reuse the builder to create a different object.
//
// Reuse would permit you to potentially leak changes to the object if the caller holds on
// to your builder, so each builder can only build a single instance.
//
private final Item<T> instance = new Item<T>();
private String dv;
public Builder<T> name(String name)
{
this.instance.name = name;
return this;
}
public Builder<T> type(String type) throws SchemaException
{
if (type == null)
{
this.instance.type = null;
this.instance.eval = null;
return this;
}
try
{
OptionType ot = OptionType.valueOf(type);
this.instance.type = ot;
OptEvaluator<T> oe = OptEvaluatorBase.getEvaluatorForType(ot);
this.instance.eval = oe;
}
catch (Exception e)
{
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, instance.name, type);
}
return this;
}
public Builder<T> description(String description)
{
this.instance.description = description;
return this;
}
public Builder<T> ev(String ev)
{
this.instance.ev = ev;
return this;
}
public Builder<T> dv(String dv)
{
this.dv = dv;
return this;
}
public Builder<T> required(String required)
{
this.instance.required = BooleanUtil.parseBoolean(required);
return this;
}
@SuppressWarnings("unchecked")
public Item<T> build() throws SchemaException
{
assertValid();
if (dv != null)
{
instance.eval.setDefaultValue(dv);
}
Item<T> result = instance;
return result;
}
private void assertValid() throws SchemaException
{
if (instance.name == null)
{
String msg = String.format("Items require a valid name: [%s]", instance);
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, msg);
}
if (instance.type == null)
{
String msg = String.format("Items require a valid type: [%s]", instance);
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, msg);
}
if (instance.eval == null)
{
String msg = String.format("Items require a valid evaluator: [%s]", instance);
throw new SchemaException(ErrorCode.INVALID_SCHEMA_ELEMENT, msg);
}
}
}
}
|
Remove unneeded constructors
|
src/main/java/com/xivvic/args/schema/item/Item.java
|
Remove unneeded constructors
|
|
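The row above pairs the commit message "Remove unneeded constructors" with the before/after contents of src/main/java/com/xivvic/args/schema/item/Item.java; the only difference between old_contents and new_contents is the deletion of the commented-out constructors, since construction goes through the single-use Builder. The sketch below is a hypothetical usage example and not part of the dataset row: the option name, the "STRING" type constant, and the default value are assumptions chosen only to illustrate how builder(Map) and build() fit together.

```java
// Hypothetical usage sketch for the Item builder shown in the row above.
// Assumes OptionType declares a STRING constant; all values are illustrative.
import java.util.HashMap;
import java.util.Map;

import com.xivvic.args.error.SchemaException;
import com.xivvic.args.schema.item.Item;

public class ItemBuilderExample {
	public static void main(String[] args) throws SchemaException {
		Map<String, String> schema = new HashMap<>();
		schema.put(Item.NAME, "verbose");      // option name
		schema.put(Item.TYPE, "STRING");       // must match an OptionType constant (assumed)
		schema.put(Item.REQUIRED, "false");    // parsed by BooleanUtil.parseBoolean
		schema.put(Item.DEFAULT, "off");       // applied via eval.setDefaultValue() in build()

		// builder(Map) populates the fields; build() validates that a name, type,
		// and evaluator are present before applying the default value.
		Item<String> item = Item.<String>builder(schema).build();
		System.out.println(item);
	}
}
```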
Java
|
epl-1.0
|
08e8049a7a9ef7eea3099e323fe530d8833d3f38
| 0
|
bendisposto/prob2-ui,bendisposto/prob2-ui,bendisposto/prob2-ui,bendisposto/prob2-ui
|
package de.prob2.ui.menu;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Optional;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import de.be4.classicalb.core.parser.exceptions.BException;
import de.codecentric.centerdevice.MenuToolkit;
import de.prob.scripting.Api;
import de.prob.statespace.AnimationSelector;
import de.prob.statespace.StateSpace;
import de.prob.statespace.Trace;
import de.prob2.ui.ProB2;
import de.prob2.ui.dotty.DottyStage;
import de.prob2.ui.formula.FormulaGenerator;
import de.prob2.ui.groovy.GroovyConsoleStage;
import de.prob2.ui.modelchecking.ModelcheckingController;
import de.prob2.ui.modelchecking.ModelcheckingStage;
import de.prob2.ui.preferences.PreferencesStage;
import de.prob2.ui.prob2fx.CurrentTrace;
import de.prob2.ui.states.BlacklistStage;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SeparatorMenuItem;
import javafx.scene.control.TextInputDialog;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.Window;
@Singleton
public class MenuController extends MenuBar {
private final Api api;
private final AnimationSelector animationSelector;
private final CurrentTrace currentTrace;
private final BlacklistStage blacklistStage;
private final PreferencesStage preferencesStage;
private final ModelcheckingController modelcheckingController;
private final Stage mcheckStage;
private final FormulaGenerator formulaGenerator;
private final GroovyConsoleStage groovyConsoleStage;
private Window window;
private DottyStage dottyStage;
@FXML private MenuItem enterFormulaForVisualization;
@FXML
private void handleLoadDefault() {
FXMLLoader loader = ProB2.injector.getInstance(FXMLLoader.class);
loader.setLocation(getClass().getResource("../main.fxml"));
try {
loader.load();
} catch (IOException e) {
System.err.println("Failed to load FXML-File!");
e.printStackTrace();
}
Parent root = loader.getRoot();
Scene scene = new Scene(root, window.getWidth(), window.getHeight());
((Stage) window).setScene(scene);
}
@FXML
private void handleLoadPerspective() {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Open File");
fileChooser.getExtensionFilters().addAll(new FileChooser.ExtensionFilter("FXML Files", "*.fxml"));
File selectedFile = fileChooser.showOpenDialog(window);
if (selectedFile != null)
try {
FXMLLoader loader = ProB2.injector.getInstance(FXMLLoader.class);
loader.setLocation(new URL("file://" + selectedFile.getPath()));
loader.load();
Parent root = loader.getRoot();
Scene scene = new Scene(root, window.getHeight(), window.getWidth());
((Stage) window).setScene(scene);
} catch (IOException e) {
System.err.println("Failed to load FXML-File!");
e.printStackTrace();
}
}
@FXML
private void handleOpen(ActionEvent event) {
final FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Open File");
fileChooser.getExtensionFilters().addAll(
// new FileChooser.ExtensionFilter("All Files", "*.*"),
new FileChooser.ExtensionFilter("Classical B Files", "*.mch", "*.ref", "*.imp")// ,
// new FileChooser.ExtensionFilter("EventB Files", "*.eventb", "*.bum",
// "*.buc"),
// new FileChooser.ExtensionFilter("CSP Files", "*.cspm")
);
final File selectedFile = fileChooser.showOpenDialog(this.window);
if (selectedFile == null) {
return;
}
switch (fileChooser.getSelectedExtensionFilter().getDescription()) {
case "Classical B Files":
final StateSpace newSpace;
try {
newSpace = this.api.b_load(selectedFile.getAbsolutePath());
} catch (IOException | BException e) {
e.printStackTrace();
Alert alert = new Alert(Alert.AlertType.ERROR, "Could not open file:\n" + e);
alert.getDialogPane().getStylesheets().add("prob.css");
alert.showAndWait();
return;
}
this.animationSelector.addNewAnimation(new Trace(newSpace));
modelcheckingController.resetView();
break;
default:
throw new IllegalStateException(
"Unknown file type selected: " + fileChooser.getSelectedExtensionFilter().getDescription());
}
}
@FXML
private void handleEditBlacklist(ActionEvent event) {
this.blacklistStage.show();
this.blacklistStage.toFront();
}
@FXML
private void handlePreferences(ActionEvent event) {
this.preferencesStage.show();
this.preferencesStage.toFront();
}
@FXML
private void handleFormulaInput(ActionEvent event) {
TextInputDialog dialog = new TextInputDialog();
dialog.setTitle("Enter Formula for Visualization");
dialog.setHeaderText("Enter Formula for Visualization");
dialog.setContentText("Enter Formula: ");
dialog.getDialogPane().getStylesheets().add("prob.css");
Optional<String> result = dialog.showAndWait();
if (result.isPresent()) {
formulaGenerator.parseAndShowFormula(result.get());
}
}
@FXML
private void handleModelCheck(ActionEvent event) {
this.mcheckStage.showAndWait();
this.mcheckStage.toFront();
}
@FXML
private void handleDotty(ActionEvent event) {
this.dottyStage.showAndWait();
}
@FXML
public void handleGroovyConsole(ActionEvent event) {
this.groovyConsoleStage.show();
this.groovyConsoleStage.toFront();
}
@FXML
public void initialize() {
this.sceneProperty().addListener((observable, from, to) -> {
if (to != null) {
to.windowProperty().addListener((observable1, from1, to1) -> {
this.window = to1;
this.mcheckStage.initOwner(this.window);
});
}
});
this.enterFormulaForVisualization.disableProperty().bind(currentTrace.existsProperty().not());
}
@Inject
private MenuController(final FXMLLoader loader, final Api api, final AnimationSelector animationSelector, final CurrentTrace currentTrace,
final BlacklistStage blacklistStage, final PreferencesStage preferencesStage,
final ModelcheckingStage modelcheckingStage, final ModelcheckingController modelcheckingController,
final FormulaGenerator formulaGenerator, final DottyStage dottyStage, final GroovyConsoleStage groovyConsoleStage) {
this.api = api;
this.animationSelector = animationSelector;
this.currentTrace = currentTrace;
this.blacklistStage = blacklistStage;
this.preferencesStage = preferencesStage;
this.formulaGenerator = formulaGenerator;
this.modelcheckingController = modelcheckingController;
this.mcheckStage = modelcheckingStage;
this.dottyStage = dottyStage;
this.groovyConsoleStage = groovyConsoleStage;
try {
loader.setLocation(getClass().getResource("menu.fxml"));
loader.setRoot(this);
loader.setController(this);
loader.load();
} catch (IOException e) {
e.printStackTrace();
}
if (System.getProperty("os.name", "").toLowerCase().contains("mac")) {
// Mac-specific menu stuff
this.setUseSystemMenuBar(true);
MenuToolkit tk = MenuToolkit.toolkit();
// Create Mac-style application menu
Menu applicationMenu = tk.createDefaultApplicationMenu("ProB 2");
this.getMenus().add(0, applicationMenu);
tk.setApplicationMenu(applicationMenu);
// Move About menu item from Help to application menu
Menu helpMenu = this.getMenus().get(this.getMenus().size() - 1);
MenuItem aboutItem = helpMenu.getItems().get(helpMenu.getItems().size() - 1);
aboutItem.setText("About ProB 2");
helpMenu.getItems().remove(aboutItem);
applicationMenu.getItems().set(0, aboutItem);
// Create Mac-style Window menu
Menu windowMenu = new Menu("Window");
windowMenu.getItems().addAll(tk.createMinimizeMenuItem(), tk.createZoomMenuItem(),
tk.createCycleWindowsItem(), new SeparatorMenuItem(), tk.createBringAllToFrontItem(),
new SeparatorMenuItem());
tk.autoAddWindowMenuItems(windowMenu);
this.getMenus().add(this.getMenus().size() - 1, windowMenu);
// Make this the global menu bar
tk.setGlobalMenuBar(this);
}
}
}
|
src/main/java/de/prob2/ui/menu/MenuController.java
|
package de.prob2.ui.menu;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Optional;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import de.be4.classicalb.core.parser.exceptions.BException;
import de.codecentric.centerdevice.MenuToolkit;
import de.prob.scripting.Api;
import de.prob.statespace.AnimationSelector;
import de.prob.statespace.StateSpace;
import de.prob.statespace.Trace;
import de.prob2.ui.ProB2;
import de.prob2.ui.dotty.DottyStage;
import de.prob2.ui.formula.FormulaGenerator;
import de.prob2.ui.groovy.GroovyConsoleStage;
import de.prob2.ui.modelchecking.ModelcheckingController;
import de.prob2.ui.modelchecking.ModelcheckingStage;
import de.prob2.ui.preferences.PreferencesStage;
import de.prob2.ui.prob2fx.CurrentTrace;
import de.prob2.ui.states.BlacklistStage;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuBar;
import javafx.scene.control.MenuItem;
import javafx.scene.control.SeparatorMenuItem;
import javafx.scene.control.TextInputDialog;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.Window;
@Singleton
public class MenuController extends MenuBar {
private final Api api;
private final AnimationSelector animationSelector;
private final CurrentTrace currentTrace;
private final BlacklistStage blacklistStage;
private final PreferencesStage preferencesStage;
private final ModelcheckingController modelcheckingController;
private final Stage mcheckStage;
private final FormulaGenerator formulaGenerator;
private final GroovyConsoleStage groovyConsoleStage;
private Window window;
private DottyStage dottyStage;
@FXML private MenuItem enterFormulaForVisualization;
@FXML
private void handleLoadDefault() {
FXMLLoader loader = ProB2.injector.getInstance(FXMLLoader.class);
loader.setLocation(getClass().getResource("../main.fxml"));
try {
loader.load();
} catch (IOException e) {
System.err.println("Failed to load FXML-File!");
e.printStackTrace();
}
Parent root = loader.getRoot();
Scene scene = new Scene(root, window.getHeight(), window.getWidth());
((Stage) window).setScene(scene);
}
@FXML
private void handleLoadPerspective() {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Open File");
fileChooser.getExtensionFilters().addAll(new FileChooser.ExtensionFilter("FXML Files", "*.fxml"));
File selectedFile = fileChooser.showOpenDialog(window);
if (selectedFile != null)
try {
FXMLLoader loader = ProB2.injector.getInstance(FXMLLoader.class);
loader.setLocation(new URL("file://" + selectedFile.getPath()));
loader.load();
Parent root = loader.getRoot();
Scene scene = new Scene(root, window.getHeight(), window.getWidth());
((Stage) window).setScene(scene);
} catch (IOException e) {
System.err.println("Failed to load FXML-File!");
e.printStackTrace();
}
}
@FXML
private void handleOpen(ActionEvent event) {
final FileChooser fileChooser = new FileChooser();
fileChooser.setTitle("Open File");
fileChooser.getExtensionFilters().addAll(
// new FileChooser.ExtensionFilter("All Files", "*.*"),
new FileChooser.ExtensionFilter("Classical B Files", "*.mch", "*.ref", "*.imp")// ,
// new FileChooser.ExtensionFilter("EventB Files", "*.eventb", "*.bum",
// "*.buc"),
// new FileChooser.ExtensionFilter("CSP Files", "*.cspm")
);
final File selectedFile = fileChooser.showOpenDialog(this.window);
if (selectedFile == null) {
return;
}
switch (fileChooser.getSelectedExtensionFilter().getDescription()) {
case "Classical B Files":
final StateSpace newSpace;
try {
newSpace = this.api.b_load(selectedFile.getAbsolutePath());
} catch (IOException | BException e) {
e.printStackTrace();
Alert alert = new Alert(Alert.AlertType.ERROR, "Could not open file:\n" + e);
alert.getDialogPane().getStylesheets().add("prob.css");
alert.showAndWait();
return;
}
this.animationSelector.addNewAnimation(new Trace(newSpace));
modelcheckingController.resetView();
break;
default:
throw new IllegalStateException(
"Unknown file type selected: " + fileChooser.getSelectedExtensionFilter().getDescription());
}
}
@FXML
private void handleEditBlacklist(ActionEvent event) {
this.blacklistStage.show();
this.blacklistStage.toFront();
}
@FXML
private void handlePreferences(ActionEvent event) {
this.preferencesStage.show();
this.preferencesStage.toFront();
}
@FXML
private void handleFormulaInput(ActionEvent event) {
TextInputDialog dialog = new TextInputDialog();
dialog.setTitle("Enter Formula for Visualization");
dialog.setHeaderText("Enter Formula for Visualization");
dialog.setContentText("Enter Formula: ");
dialog.getDialogPane().getStylesheets().add("prob.css");
Optional<String> result = dialog.showAndWait();
if (result.isPresent()) {
formulaGenerator.parseAndShowFormula(result.get());
}
}
@FXML
private void handleModelCheck(ActionEvent event) {
this.mcheckStage.showAndWait();
this.mcheckStage.toFront();
}
@FXML
private void handleDotty(ActionEvent event) {
this.dottyStage.showAndWait();
}
@FXML
public void handleGroovyConsole(ActionEvent event) {
this.groovyConsoleStage.show();
this.groovyConsoleStage.toFront();
}
@FXML
public void initialize() {
this.sceneProperty().addListener((observable, from, to) -> {
if (to != null) {
to.windowProperty().addListener((observable1, from1, to1) -> {
this.window = to1;
this.mcheckStage.initOwner(this.window);
});
}
});
this.enterFormulaForVisualization.disableProperty().bind(currentTrace.existsProperty().not());
}
@Inject
private MenuController(final FXMLLoader loader, final Api api, final AnimationSelector animationSelector, final CurrentTrace currentTrace,
final BlacklistStage blacklistStage, final PreferencesStage preferencesStage,
final ModelcheckingStage modelcheckingStage, final ModelcheckingController modelcheckingController,
final FormulaGenerator formulaGenerator, final DottyStage dottyStage, final GroovyConsoleStage groovyConsoleStage) {
this.api = api;
this.animationSelector = animationSelector;
this.currentTrace = currentTrace;
this.blacklistStage = blacklistStage;
this.preferencesStage = preferencesStage;
this.formulaGenerator = formulaGenerator;
this.modelcheckingController = modelcheckingController;
this.mcheckStage = modelcheckingStage;
this.dottyStage = dottyStage;
this.groovyConsoleStage = groovyConsoleStage;
try {
loader.setLocation(getClass().getResource("menu.fxml"));
loader.setRoot(this);
loader.setController(this);
loader.load();
} catch (IOException e) {
e.printStackTrace();
}
if (System.getProperty("os.name", "").toLowerCase().contains("mac")) {
// Mac-specific menu stuff
this.setUseSystemMenuBar(true);
MenuToolkit tk = MenuToolkit.toolkit();
// Create Mac-style application menu
Menu applicationMenu = tk.createDefaultApplicationMenu("ProB 2");
this.getMenus().add(0, applicationMenu);
tk.setApplicationMenu(applicationMenu);
// Move About menu item from Help to application menu
Menu helpMenu = this.getMenus().get(this.getMenus().size() - 1);
MenuItem aboutItem = helpMenu.getItems().get(helpMenu.getItems().size() - 1);
aboutItem.setText("About ProB 2");
helpMenu.getItems().remove(aboutItem);
applicationMenu.getItems().set(0, aboutItem);
// Create Mac-style Window menu
Menu windowMenu = new Menu("Window");
windowMenu.getItems().addAll(tk.createMinimizeMenuItem(), tk.createZoomMenuItem(),
tk.createCycleWindowsItem(), new SeparatorMenuItem(), tk.createBringAllToFrontItem(),
new SeparatorMenuItem());
tk.autoAddWindowMenuItems(windowMenu);
this.getMenus().add(this.getMenus().size() - 1, windowMenu);
// Make this the global menu bar
tk.setGlobalMenuBar(this);
}
}
}
|
Fix width and height getting swapped when loading default perspective
|
src/main/java/de/prob2/ui/menu/MenuController.java
|
Fix width and height getting swapped when loading default perspective
|
|
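The row above records the one-line fix described by its commit message: in handleLoadDefault the old contents construct the Scene as new Scene(root, window.getHeight(), window.getWidth()), while the new contents pass width before height. For reference, a minimal standalone JavaFX sketch of that constructor's argument order follows; the 800x600 size and class name are arbitrary and not taken from the row.

```java
// Minimal JavaFX sketch of the Scene constructor argument order the fix relies on:
// Scene(Parent root, double width, double height). Sizes here are arbitrary examples.
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.layout.StackPane;
import javafx.stage.Stage;

public class SceneSizeExample extends Application {
	@Override
	public void start(Stage stage) {
		StackPane root = new StackPane();
		// Width first, then height; swapping the two arguments reproduces the bug the commit fixes.
		Scene scene = new Scene(root, 800, 600);
		stage.setScene(scene);
		stage.show();
	}

	public static void main(String[] args) {
		launch(args);
	}
}
```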
Java
|
mpl-2.0
|
d87d3039f291f7ed9445be3503a63df15a294e10
| 0
|
naraink/openmrs-core,koskedk/openmrs-core,michaelhofer/openmrs-core,aj-jaswanth/openmrs-core,ern2/openmrs-core,donaldgavis/openmrs-core,ern2/openmrs-core,milankarunarathne/openmrs-core,trsorsimoII/openmrs-core,shiangree/openmrs-core,sravanthi17/openmrs-core,jvena1/openmrs-core,koskedk/openmrs-core,andyvand/OpenMRS,milankarunarathne/openmrs-core,donaldgavis/openmrs-core,ssmusoke/openmrs-core,andyvand/OpenMRS,kckc/openmrs-core,milankarunarathne/openmrs-core,Negatu/openmrs-core,sintjuri/openmrs-core,macorrales/openmrs-core,lbl52001/openmrs-core,dcmul/openmrs-core,WANeves/openmrs-core,iLoop2/openmrs-core,kristopherschmidt/openmrs-core,trsorsimoII/openmrs-core,sravanthi17/openmrs-core,kabariyamilind/openMRSDEV,jamesfeshner/openmrs-module,kabariyamilind/openMRSDEV,alexwind26/openmrs-core,sintjuri/openmrs-core,jembi/openmrs-core,maekstr/openmrs-core,macorrales/openmrs-core,ldf92/openmrs-core,lilo2k/openmrs-core,vinayvenu/openmrs-core,jvena1/openmrs-core,ssmusoke/openmrs-core,Openmrs-joel/openmrs-core,lbl52001/openmrs-core,joansmith/openmrs-core,macorrales/openmrs-core,joansmith/openmrs-core,maekstr/openmrs-core,aj-jaswanth/openmrs-core,MuhammadSafwan/Stop-Button-Ability,MitchellBot/openmrs-core,nilusi/Legacy-UI,jvena1/openmrs-core,alexwind26/openmrs-core,prisamuel/openmrs-core,andyvand/OpenMRS,MuhammadSafwan/Stop-Button-Ability,naraink/openmrs-core,MuhammadSafwan/Stop-Button-Ability,aboutdata/openmrs-core,lilo2k/openmrs-core,rbtracker/openmrs-core,trsorsimoII/openmrs-core,maekstr/openmrs-core,preethi29/openmrs-core,joansmith/openmrs-core,shiangree/openmrs-core,MitchellBot/openmrs-core,shiangree/openmrs-core,donaldgavis/openmrs-core,naraink/openmrs-core,Negatu/openmrs-core,prisamuel/openmrs-core,naraink/openmrs-core,kristopherschmidt/openmrs-core,sravanthi17/openmrs-core,Negatu/openmrs-core,siddharthkhabia/openmrs-core,jvena1/openmrs-core,ern2/openmrs-core,maany/openmrs-core,macorrales/openmrs-core,preethi29/openmrs-core,MitchellBot/openmrs-core,WANeves/openmrs-core,hoquangtruong/TestMylyn,lbl52001/openmrs-core,WANeves/openmrs-core,iLoop2/openmrs-core,dcmul/openmrs-core,Negatu/openmrs-core,foolchan2556/openmrs-core,Openmrs-joel/openmrs-core,spereverziev/openmrs-core,kigsmtua/openmrs-core,vinayvenu/openmrs-core,Negatu/openmrs-core,lilo2k/openmrs-core,hoquangtruong/TestMylyn,dlahn/openmrs-core,kckc/openmrs-core,preethi29/openmrs-core,Openmrs-joel/openmrs-core,aboutdata/openmrs-core,chethandeshpande/openmrs-core,chethandeshpande/openmrs-core,pselle/openmrs-core,siddharthkhabia/openmrs-core,koskedk/openmrs-core,nilusi/Legacy-UI,preethi29/openmrs-core,rbtracker/openmrs-core,dlahn/openmrs-core,maany/openmrs-core,jembi/openmrs-core,siddharthkhabia/openmrs-core,donaldgavis/openmrs-core,iLoop2/openmrs-core,kckc/openmrs-core,koskedk/openmrs-core,lilo2k/openmrs-core,rbtracker/openmrs-core,dlahn/openmrs-core,macorrales/openmrs-core,dcmul/openmrs-core,WANeves/openmrs-core,dcmul/openmrs-core,Openmrs-joel/openmrs-core,AbhijitParate/openmrs-core,jamesfeshner/openmrs-module,AbhijitParate/openmrs-core,koskedk/openmrs-core,ldf92/openmrs-core,asifur77/openmrs,Winbobob/openmrs-core,maekstr/openmrs-core,Openmrs-joel/openmrs-core,pselle/openmrs-core,sintjuri/openmrs-core,alexwind26/openmrs-core,kigsmtua/openmrs-core,Ch3ck/openmrs-core,lbl52001/openmrs-core,kigsmtua/openmrs-core,naraink/openmrs-core,dcmul/openmrs-core,aboutdata/openmrs-core,MitchellBot/openmrs-core,Ch3ck/openmrs-core,jcantu1988/openmrs-core,jembi/openmrs-core,maany/openmrs-core,MuhammadSafwan/Stop-Button-Ability,kabariyamilind/openMRSDEV,dlahn/openmrs-cor
e,foolchan2556/openmrs-core,AbhijitParate/openmrs-core,siddharthkhabia/openmrs-core,sravanthi17/openmrs-core,asifur77/openmrs,Winbobob/openmrs-core,michaelhofer/openmrs-core,maekstr/openmrs-core,alexei-grigoriev/openmrs-core,dlahn/openmrs-core,MuhammadSafwan/Stop-Button-Ability,prisamuel/openmrs-core,shiangree/openmrs-core,hoquangtruong/TestMylyn,joansmith/openmrs-core,spereverziev/openmrs-core,hoquangtruong/TestMylyn,AbhijitParate/openmrs-core,asifur77/openmrs,ldf92/openmrs-core,WANeves/openmrs-core,iLoop2/openmrs-core,asifur77/openmrs,jamesfeshner/openmrs-module,aboutdata/openmrs-core,vinayvenu/openmrs-core,andyvand/OpenMRS,kigsmtua/openmrs-core,kigsmtua/openmrs-core,aj-jaswanth/openmrs-core,prisamuel/openmrs-core,MuhammadSafwan/Stop-Button-Ability,Winbobob/openmrs-core,alexei-grigoriev/openmrs-core,jembi/openmrs-core,spereverziev/openmrs-core,maany/openmrs-core,jembi/openmrs-core,ssmusoke/openmrs-core,sintjuri/openmrs-core,pselle/openmrs-core,geoff-wasilwa/openmrs-core,kristopherschmidt/openmrs-core,koskedk/openmrs-core,ern2/openmrs-core,donaldgavis/openmrs-core,geoff-wasilwa/openmrs-core,jvena1/openmrs-core,jembi/openmrs-core,MitchellBot/openmrs-core,jcantu1988/openmrs-core,chethandeshpande/openmrs-core,sintjuri/openmrs-core,ssmusoke/openmrs-core,Negatu/openmrs-core,sadhanvejella/openmrs,Ch3ck/openmrs-core,pselle/openmrs-core,kabariyamilind/openMRSDEV,rbtracker/openmrs-core,AbhijitParate/openmrs-core,chethandeshpande/openmrs-core,michaelhofer/openmrs-core,Ch3ck/openmrs-core,maany/openmrs-core,prisamuel/openmrs-core,Winbobob/openmrs-core,geoff-wasilwa/openmrs-core,ldf92/openmrs-core,milankarunarathne/openmrs-core,shiangree/openmrs-core,andyvand/OpenMRS,pselle/openmrs-core,milankarunarathne/openmrs-core,nilusi/Legacy-UI,foolchan2556/openmrs-core,iLoop2/openmrs-core,alexwind26/openmrs-core,rbtracker/openmrs-core,ldf92/openmrs-core,trsorsimoII/openmrs-core,hoquangtruong/TestMylyn,WANeves/openmrs-core,michaelhofer/openmrs-core,shiangree/openmrs-core,sadhanvejella/openmrs,chethandeshpande/openmrs-core,kckc/openmrs-core,iLoop2/openmrs-core,kabariyamilind/openMRSDEV,spereverziev/openmrs-core,asifur77/openmrs,kigsmtua/openmrs-core,sadhanvejella/openmrs,trsorsimoII/openmrs-core,jamesfeshner/openmrs-module,alexei-grigoriev/openmrs-core,maekstr/openmrs-core,vinayvenu/openmrs-core,foolchan2556/openmrs-core,ssmusoke/openmrs-core,Ch3ck/openmrs-core,siddharthkhabia/openmrs-core,alexei-grigoriev/openmrs-core,nilusi/Legacy-UI,ern2/openmrs-core,sadhanvejella/openmrs,aj-jaswanth/openmrs-core,jcantu1988/openmrs-core,jcantu1988/openmrs-core,lilo2k/openmrs-core,kristopherschmidt/openmrs-core,vinayvenu/openmrs-core,milankarunarathne/openmrs-core,alexwind26/openmrs-core,aj-jaswanth/openmrs-core,nilusi/Legacy-UI,kristopherschmidt/openmrs-core,siddharthkhabia/openmrs-core,joansmith/openmrs-core,sadhanvejella/openmrs,geoff-wasilwa/openmrs-core,sravanthi17/openmrs-core,michaelhofer/openmrs-core,spereverziev/openmrs-core,Winbobob/openmrs-core,naraink/openmrs-core,Winbobob/openmrs-core,sintjuri/openmrs-core,andyvand/OpenMRS,pselle/openmrs-core,geoff-wasilwa/openmrs-core,kckc/openmrs-core,jcantu1988/openmrs-core,kckc/openmrs-core,hoquangtruong/TestMylyn,foolchan2556/openmrs-core,preethi29/openmrs-core,alexei-grigoriev/openmrs-core,sadhanvejella/openmrs,lbl52001/openmrs-core,foolchan2556/openmrs-core,prisamuel/openmrs-core,lbl52001/openmrs-core,aboutdata/openmrs-core,jamesfeshner/openmrs-module,spereverziev/openmrs-core,alexei-grigoriev/openmrs-core,dcmul/openmrs-core,aboutdata/openmrs-core,nilusi/Legacy-UI,AbhijitPara
te/openmrs-core,lilo2k/openmrs-core
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.api;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import org.hibernate.FlushMode;
import org.openmrs.Concept;
import org.openmrs.ConceptClass;
import org.openmrs.ConceptDatatype;
import org.openmrs.ConceptProposal;
import org.openmrs.EncounterType;
import org.openmrs.FieldType;
import org.openmrs.GlobalProperty;
import org.openmrs.ImplementationId;
import org.openmrs.Location;
import org.openmrs.MimeType;
import org.openmrs.OpenmrsObject;
import org.openmrs.PatientIdentifierType;
import org.openmrs.Privilege;
import org.openmrs.Role;
import org.openmrs.Tribe;
import org.openmrs.annotation.Authorized;
import org.openmrs.api.db.AdministrationDAO;
import org.openmrs.reporting.AbstractReportObject;
import org.openmrs.reporting.Report;
import org.openmrs.util.OpenmrsConstants;
import org.openmrs.util.PrivilegeConstants;
import org.openmrs.validator.ValidateUtil;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.Errors;
/**
* Contains methods pertaining to doing some administrative tasks in OpenMRS
* <p>
* Use:<br/>
*
* <pre>
*
*
*
* List<GlobalProperty> globalProperties = Context.getAdministrationService().getGlobalProperties();
* </pre>
*
* @see org.openmrs.api.context.Context
*/
@Transactional
public interface AdministrationService extends OpenmrsService {
/**
* Used by Spring to set the specific/chosen database access implementation
*
* @param dao The dao implementation to use
*/
public void setAdministrationDAO(AdministrationDAO dao);
/**
* @deprecated use {@link org.openmrs.api.EncounterService#saveEncounterType(EncounterType)}
*/
public void createEncounterType(EncounterType encounterType) throws APIException;
/**
* @deprecated use {@link org.openmrs.api.EncounterService#saveEncounterType(EncounterType)}
*/
public void updateEncounterType(EncounterType encounterType) throws APIException;
/**
* @deprecated use {@link org.openmrs.api.EncounterService#purgeEncounterType(EncounterType)}
*/
public void deleteEncounterType(EncounterType encounterType) throws APIException;
/**
* @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)
* @deprecated replaced by
* {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)}
*/
public void createPatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException;
/**
* @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)
* @deprecated replaced by
* {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)}
*/
public void updatePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException;
/**
* @see org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType)
* @deprecated replaced by
* {@link org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType)}
*/
public void deletePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void createTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void updateTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void deleteTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void retireTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void unretireTribe(Tribe tribe) throws APIException;
/**
* @deprecated use {@link FormService#saveFieldType(FieldType)}
*/
public void createFieldType(FieldType fieldType) throws APIException;
/**
* @deprecated use {@link FormService#saveFieldType(FieldType)}
*/
public void updateFieldType(FieldType fieldType) throws APIException;
/**
* @deprecated use {@link FormService#purgeFieldType(FieldType)}
*/
public void deleteFieldType(FieldType fieldType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.ObsService#saveMimeType(MimeType)}
**/
@Authorized(OpenmrsConstants.PRIV_MANAGE_MIME_TYPES)
public void createMimeType(MimeType mimeType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.ObsService#saveMimeType(MimeType)}
*/
@Authorized(OpenmrsConstants.PRIV_MANAGE_MIME_TYPES)
public void updateMimeType(MimeType mimeType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.ObsService#purgeMimeType(MimeType)}
*/
@Authorized(OpenmrsConstants.PRIV_MANAGE_MIME_TYPES)
public void deleteMimeType(MimeType mimeType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.LocationService#saveLocation(Location)}
*/
public void createLocation(Location location) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.LocationService#saveLocation(Location)}
*/
public void updateLocation(Location location) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.LocationService#purgeLocation(Location)}
*/
public void deleteLocation(Location location) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#saveRole(Role)}
*/
public void createRole(Role role) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#saveRole(Role)}
*/
public void updateRole(Role role) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#purgeRole(Role)}
*/
public void deleteRole(Role role) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#savePrivilege(Privilege)}
*/
public void createPrivilege(Privilege privilege) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#savePrivilege(Privilege)}
*/
public void updatePrivilege(Privilege privilege) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#purgePrivilege(Privilege)}
*/
public void deletePrivilege(Privilege privilege) throws APIException;
/**
* Create a new ConceptClass
*
* @param cc ConceptClass to create
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptClass(ConceptClass)}
*/
public void createConceptClass(ConceptClass cc) throws APIException;
/**
* Update ConceptClass
*
* @param cc ConceptClass to update
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptClass(ConceptClass)}
*/
public void updateConceptClass(ConceptClass cc) throws APIException;
/**
* Delete ConceptClass
*
* @param cc ConceptClass to delete
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#purgeConceptClass(ConceptClass)}
*/
public void deleteConceptClass(ConceptClass cc) throws APIException;
/**
* Create a new ConceptDatatype
*
* @param cd ConceptDatatype to create
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptDatatype(ConceptDatatype)}
*/
public void createConceptDatatype(ConceptDatatype cd) throws APIException;
/**
* Update ConceptDatatype
*
* @param cd ConceptDatatype to update
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptDatatype(ConceptDatatype)}
*/
public void updateConceptDatatype(ConceptDatatype cd) throws APIException;
/**
* Delete ConceptDatatype
*
* @param cd ConceptDatatype to delete
* @throws APIException
* @deprecated use {@link org.openmrs.api#deleteConceptDatatype(ConceptDatatype)}
*/
public void deleteConceptDatatype(ConceptDatatype cd) throws APIException;
/**
* Create a new Report
*
* @param report Report to create
* @throws APIException
* @deprecated see reportingcompatibility module
*/
@Deprecated
public void createReport(Report report) throws APIException;
/**
* Update Report
*
* @param report Report to update
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void updateReport(Report report) throws APIException;
/**
* Delete Report
*
* @param report Report to delete
* @throws APIException
* @deprecated see reportingcompatibility module
*/
@Deprecated
public void deleteReport(Report report) throws APIException;
/**
* Create a new Report Object
*
* @param reportObject Report Object to create
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void createReportObject(AbstractReportObject reportObject) throws APIException;
/**
* Update Report Object
*
* @param reportObject the Report Object to update
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void updateReportObject(AbstractReportObject reportObject) throws APIException;
/**
* Delete Report Object
*
* @param reportObjectId Internal identifier for the Report Object to delete
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void deleteReportObject(Integer reportObjectId) throws APIException;
/**
* Iterates over the words in names and synonyms (for each locale) and updates the concept word
* business table
*
* @param concept
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptWord(Concept)}
*/
public void updateConceptWord(Concept concept) throws APIException;
/**
* Iterates over all concepts calling updateConceptWord(concept)
*
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptWords()}
*/
public void updateConceptWords() throws APIException;
/**
* Iterates over all concepts with conceptIds between <code>conceptIdStart</code> and
* <code>conceptIdEnd</code> (inclusive) calling updateConceptWord(concept)
*
* @throws APIException
* @deprecated moved to
* {@link org.openmrs.api.ConceptService#updateConceptWords(Integer, Integer)}
*/
public void updateConceptWords(Integer conceptIdStart, Integer conceptIdEnd) throws APIException;
/**
* Updates the concept set derived business table for this concept (bursting the concept sets)
*
* @param concept
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptSetDerived(Concept)};
*/
public void updateConceptSetDerived(Concept concept) throws APIException;
/**
* Iterates over all concepts calling updateConceptSetDerived(concept)
*
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptSetDerived()}
*/
public void updateConceptSetDerived() throws APIException;
/**
* Create a concept proposal
*
* @param cp
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptProposal(ConceptProposal)}
*/
public void createConceptProposal(ConceptProposal cp) throws APIException;
/**
* Update a concept proposal
*
* @param cp
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptProposal(ConceptProposal)}
*/
public void updateConceptProposal(ConceptProposal cp) throws APIException;
/**
* maps a concept proposal to a concept
*
* @param cp
* @param mappedConcept
* @throws APIException
* @deprecated moved to
* {@link org.openmrs.api.ConceptService#mapConceptProposalToConcept(ConceptProposal, Concept)}
*/
public void mapConceptProposalToConcept(ConceptProposal cp, Concept mappedConcept) throws APIException;
/**
* rejects a concept proposal
*
* @param cp
* @deprecated moved to
* {@link org.openmrs.api.ConceptService#rejectConceptProposal(ConceptProposal)}
*/
public void rejectConceptProposal(ConceptProposal cp);
/**
* @param site
* @param start
* @param count
* @deprecated use the mrngen module instead
*/
public void mrnGeneratorLog(String site, Integer start, Integer count);
/**
* @deprecated use the mrngen module instead
*/
@Transactional(readOnly = true)
public Collection<?> getMRNGeneratorLog();
/**
* Get a global property by its uuid. There should be only one of these in the database (well,
* in the world actually). If multiple are found, an error is thrown.
*
* @return the global property matching the given uuid
* @should find object given valid uuid
* @should return null if no object found with given uuid
*/
@Transactional(readOnly = true)
public GlobalProperty getGlobalPropertyByUuid(String uuid) throws APIException;
/**
* Get a listing or important variables used in openmrs
*
* @return a map from variable name to variable value
* @should return all registered system variables
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.VIEW_ADMIN_FUNCTIONS)
public SortedMap<String, String> getSystemVariables() throws APIException;
/**
* Get a map of all the System Information. Java, user, time, runtime properties, etc
*
* @return a map from variable name to a map of the information
* @should return all system information
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.VIEW_ADMIN_FUNCTIONS)
public Map<String, Map<String, String>> getSystemInformation() throws APIException;
/**
* Gets the global property that has the given <code>propertyName</code>.
* <p>
* If <code>propertyName</code> is not found in the list of Global Properties currently in the
* database, a null value is returned. This method should not have any authorization check.
*
* @param propertyName property key to look for
* @return value of property returned or null if none
* @see #getGlobalProperty(String, String)
* @should not fail with null propertyName
* @should get property value given valid property name
* @should get property in case insensitive way
*/
@Transactional(readOnly = true)
public String getGlobalProperty(String propertyName) throws APIException;
/**
* Gets the global property that has the given <code>propertyName</code>
* <p>
* If propertyName is not found in the list of Global Properties currently in the database, a
* <code>defaultValue</code> is returned
* <p>
* This method should not have any authorization check
*
* @param propertyName property key to look for
* @param defaultValue value to return if propertyName is not found
* @return value of propertyName property or defaultValue if none
* @should return default value if property name does not exist
* @should not fail with null default value
*/
@Transactional(readOnly = true)
public String getGlobalProperty(String propertyName, String defaultValue) throws APIException;
/**
* Gets the global property that has the given <code>propertyName</code>
*
* @param propertyName property key to look for
* @return the global property that matches the given <code>propertyName</code>
* @should return null when no global property match given property name
*/
@Transactional(readOnly = true)
public GlobalProperty getGlobalPropertyObject(String propertyName);
/**
* Gets all global properties that begin with <code>prefix</code>.
*
* @param prefix The beginning of the property name to match.
* @return a <code>List</code> of <code>GlobalProperty</code>s that match <code>prefix</code>
* @since 1.5
* @should return all relevant global properties in the database
*/
@Transactional(readOnly = true)
public List<GlobalProperty> getGlobalPropertiesByPrefix(String prefix);
/**
* Gets all global properties that end with <code>suffix</code>.
*
* @param prefix The end of the property name to match.
* @return a <code>List</code> of <code>GlobalProperty</code>s that match <code>.*suffix</code>
* @since 1.6
* @should return all relevant global properties in the database
*/
@Transactional(readOnly = true)
public List<GlobalProperty> getGlobalPropertiesBySuffix(String suffix);
/**
* Get a list of all global properties in the system
*
* @return list of global properties
* @should return all global properties in the database
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.VIEW_GLOBAL_PROPERTIES)
public List<GlobalProperty> getAllGlobalProperties() throws APIException;
/**
* @deprecated use {@link #getAllGlobalProperties()}
*/
@Transactional(readOnly = true)
public List<GlobalProperty> getGlobalProperties();
/**
* Save the given list of global properties to the database.
*
* @param props list of GlobalProperty objects to save
* @return the saved global properties
* @should save all global properties to the database
* @should not fail with empty list
* @should assign uuid to all new properties
* @should save properties with case difference only
*/
@Authorized(PrivilegeConstants.MANAGE_GLOBAL_PROPERTIES)
public List<GlobalProperty> saveGlobalProperties(List<GlobalProperty> props) throws APIException;
/**
* @deprecated use {@link #saveGlobalProperties(List)}
*/
public void setGlobalProperties(List<GlobalProperty> props);
/**
* Completely remove the given global property from the database
*
* @param globalProperty the global property to delete/remove from the database
* @throws APIException
* @should delete global property from database
*/
@Authorized(PrivilegeConstants.PURGE_GLOBAL_PROPERTIES)
public void purgeGlobalProperty(GlobalProperty globalProperty) throws APIException;
/**
* Completely remove the given global properties from the database
*
* @param globalProperties the global properties to delete/remove from the database
* @throws APIException
* @should delete global properties from database
*/
@Authorized(PrivilegeConstants.PURGE_GLOBAL_PROPERTIES)
public void purgeGlobalProperties(List<GlobalProperty> globalProperties) throws APIException;
/**
* Use
*
* <pre>
* purgeGlobalProperty(new GlobalProperty(propertyName));
* </pre>
*
* @deprecated use {@link #purgeGlobalProperty(GlobalProperty)}
*/
public void deleteGlobalProperty(String propertyName);
/**
* Use
*
* <pre>
* saveGlobalProperty(new GlobalProperty(propertyName, propertyValue));
* </pre>
*
* @deprecated use #saveGlobalProperty(GlobalProperty)
*/
public void setGlobalProperty(String propertyName, String propertyValue);
/**
* Save the given global property to the database
*
* @param gp global property to save
* @return the saved global property
* @throws APIException
* @should create global property in database
* @should overwrite global property if exists
* @should not allow different properties to have the same string with different case
* @should save a global property whose typed value is handled by a custom datatype
*/
@Authorized(PrivilegeConstants.MANAGE_GLOBAL_PROPERTIES)
public GlobalProperty saveGlobalProperty(GlobalProperty gp) throws APIException;
/**
* @deprecated use {@link #saveGlobalProperty(GlobalProperty)}
*/
public void setGlobalProperty(GlobalProperty gp);
/**
* @deprecated use {@link #saveGlobalProperty(GlobalProperty)}
*/
public void addGlobalProperty(String propertyName, String propertyValue);
/**
* @deprecated use {@link #saveGlobalProperty(GlobalProperty)}
*/
public void addGlobalProperty(GlobalProperty gp);
/**
* Allows code to be notified when a global property is created/edited/deleted.
*
* @see GlobalPropertyListener
* @param listener The listener to register
*/
public void addGlobalPropertyListener(GlobalPropertyListener listener);
/**
* Removes a GlobalPropertyListener previously registered by
* {@link #addGlobalPropertyListener(GlobalPropertyListener)}
*
* @param listener
*/
public void removeGlobalPropertyListener(GlobalPropertyListener listener);
/**
* Runs the <code>sql</code> on the database. If <code>selectOnly</code> is flagged then any
* non-select sql statements will be rejected.
*
* @param sql
* @param selectOnly
* @return ResultSet
* @throws APIException
* @should execute sql containing group by
*/
@Authorized(PrivilegeConstants.SQL_LEVEL_ACCESS)
public List<List<Object>> executeSQL(String sql, boolean selectOnly) throws APIException;
/**
* Get the implementation id stored for this server Returns null if no implementation id has
* been successfully set yet
*
* @return ImplementationId object that is this implementation's unique id
* @should return null if no implementation id is defined yet
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.MANAGE_IMPLEMENTATION_ID)
public ImplementationId getImplementationId() throws APIException;
/**
* Set the given <code>implementationId</code> as this implementation's unique id
*
* @param implementationId the ImplementationId to save
* @throws APIException if implementationId is empty or is invalid according to central id
* server
* @should create implementation id in database
* @should overwrite implementation id in database if exists
* @should not fail if given implementationId is null
* @should throw APIException if given empty implementationId object
* @should throw APIException if given a caret in the implementationId code
* @should throw APIException if given a pipe in the implementationId code
* @should set uuid on implementation id global property
*/
@Authorized(PrivilegeConstants.MANAGE_IMPLEMENTATION_ID)
public void setImplementationId(ImplementationId implementationId) throws APIException;
/**
* Gets the list of locales which the administrator has allowed for use on the system. This is
* specified with a global property named
* {@link OpenmrsConstants#GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST}.
*
* @return list of allowed locales
* @should return at least one locale if no locales defined in database yet
* @should not fail if not global property for locales allowed defined yet
* @should not return duplicates even if the global property has them
*/
@Transactional(readOnly = true)
public List<Locale> getAllowedLocales();
/**
* Gets the list of locales for which localized messages are available for the user interface
* (presentation layer). This set includes all the available locales (as indicated by the
* MessageSourceService) filtered by the allowed locales (as indicated by this
* AdministrationService).
*
* @return list of allowed presentation locales TODO change this return type to list?
* @should return at least one locale if no locales defined in database yet
* @should not return more locales than message source service locales
*/
@Transactional(readOnly = true)
public Set<Locale> getPresentationLocales();
/**
* Returns a global property according to the type specified
*
* @param <T>
* @param propertyName
* @should get property value in the proper type specified
* @should return default value if property name does not exist
* @return property value in the type of the default value
* @since 1.7
*/
public <T> T getGlobalPropertyValue(String propertyName, T defaultValue) throws APIException;
/**
* @param aClass class of object getting length for
* @param fieldName name of the field to get the length for
* @return the max field length of a property
*/
@Transactional(readOnly = true)
public int getMaximumPropertyLength(Class<? extends OpenmrsObject> aClass, String fieldName);
/**
* Performs validation in the manual flush mode to prevent any premature flushes.
* <p>
* Used by {@link ValidateUtil#validate(Object)}.
*
* @see FlushMode
* @since 1.9
* @param object
* @param errors
* @should pass for a valid object
* @should fail for an invalid object
*/
@Transactional(readOnly = true)
public void validate(Object object, Errors errors) throws APIException;
}
|
api/src/main/java/org/openmrs/api/AdministrationService.java
|
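The new_contents cell above is the AdministrationService interface (new_file: api/src/main/java/org/openmrs/api/AdministrationService.java), whose Javadoc centres on the global-property accessors. Below is a hypothetical sketch of how those accessors are typically reached through Context, assuming an OpenMRS application context and session are already open; the property name "example.refreshInterval" is invented for illustration.

```java
// Hypothetical sketch (not from the dataset row): reading and saving a global
// property through the AdministrationService interface shown above.
import org.openmrs.GlobalProperty;
import org.openmrs.api.AdministrationService;
import org.openmrs.api.context.Context;

public class GlobalPropertyExample {

	public static String readAndPersistRefreshInterval() {
		AdministrationService adminService = Context.getAdministrationService();

		// getGlobalProperty(String, String) falls back to the supplied default when
		// the property is missing and is documented as needing no authorization check.
		String interval = adminService.getGlobalProperty("example.refreshInterval", "60");

		// saveGlobalProperty creates the property or overwrites an existing one;
		// in the interface above it is guarded by MANAGE_GLOBAL_PROPERTIES.
		adminService.saveGlobalProperty(new GlobalProperty("example.refreshInterval", interval));
		return interval;
	}
}
```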
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs.api;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import org.hibernate.FlushMode;
import org.openmrs.Concept;
import org.openmrs.ConceptClass;
import org.openmrs.ConceptDatatype;
import org.openmrs.ConceptProposal;
import org.openmrs.EncounterType;
import org.openmrs.FieldType;
import org.openmrs.GlobalProperty;
import org.openmrs.ImplementationId;
import org.openmrs.Location;
import org.openmrs.MimeType;
import org.openmrs.OpenmrsObject;
import org.openmrs.PatientIdentifierType;
import org.openmrs.Privilege;
import org.openmrs.Role;
import org.openmrs.Tribe;
import org.openmrs.annotation.Authorized;
import org.openmrs.api.db.AdministrationDAO;
import org.openmrs.reporting.AbstractReportObject;
import org.openmrs.reporting.Report;
import org.openmrs.util.OpenmrsConstants;
import org.openmrs.util.PrivilegeConstants;
import org.openmrs.validator.ValidateUtil;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.Errors;
/**
* Contains methods for performing administrative tasks in OpenMRS
* <p>
* Use:<br/>
*
* <pre>
*
*
*
* List<GlobalProperty> globalProperties = Context.getAdministrationService().getGlobalProperties();
* </pre>
*
* @see org.openmrs.api.context.Context
*/
@Transactional
public interface AdministrationService extends OpenmrsService {
/**
* Used by Spring to set the specific/chosen database access implementation
*
* @param dao The dao implementation to use
*/
public void setAdministrationDAO(AdministrationDAO dao);
/**
* @deprecated use {@link org.openmrs.api.EncounterService#saveEncounterType(EncounterType)}
*/
public void createEncounterType(EncounterType encounterType) throws APIException;
/**
* @deprecated use {@link org.openmrs.api.EncounterService#saveEncounterType(EncounterType)}
*/
public void updateEncounterType(EncounterType encounterType) throws APIException;
/**
* @deprecated use {@link org.openmrs.api.EncounterService#purgeEncounterType(EncounterType)}
*/
public void deleteEncounterType(EncounterType encounterType) throws APIException;
/**
* @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)
* @deprecated replaced by
* {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)}
*/
public void createPatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException;
/**
* @see org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)
* @deprecated replaced by
* {@link org.openmrs.api.PatientService#savePatientIdentifierType(PatientIdentifierType)}
*/
public void updatePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException;
/**
* @see org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType)
* @deprecated replaced by
* {@link org.openmrs.api.PatientService#purgePatientIdentifierType(PatientIdentifierType)}
*/
public void deletePatientIdentifierType(PatientIdentifierType patientIdentifierType) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void createTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void updateTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void deleteTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void retireTribe(Tribe tribe) throws APIException;
/**
* @deprecated The Tribe object is no longer supported. Install the Tribe module
*/
public void unretireTribe(Tribe tribe) throws APIException;
/**
* @deprecated use {@link FormService#saveFieldType(FieldType)}
*/
public void createFieldType(FieldType fieldType) throws APIException;
/**
* @deprecated use {@link FormService#saveFieldType(FieldType)}
*/
public void updateFieldType(FieldType fieldType) throws APIException;
/**
* @deprecated use {@link FormService#purgeFieldType(FieldType)}
*/
public void deleteFieldType(FieldType fieldType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.ObsService#saveMimeType(MimeType)}
**/
@Authorized(OpenmrsConstants.PRIV_MANAGE_MIME_TYPES)
public void createMimeType(MimeType mimeType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.ObsService#saveMimeType(MimeType)}
*/
@Authorized(OpenmrsConstants.PRIV_MANAGE_MIME_TYPES)
public void updateMimeType(MimeType mimeType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.ObsService#purgeMimeType(MimeType)}
*/
@Authorized(OpenmrsConstants.PRIV_MANAGE_MIME_TYPES)
public void deleteMimeType(MimeType mimeType) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.LocationService#saveLocation(Location)}
*/
public void createLocation(Location location) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.LocationService#saveLocation(Location)}
*/
public void updateLocation(Location location) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.LocationService#purgeLocation(Location)}
*/
public void deleteLocation(Location location) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#saveRole(Role)}
*/
public void createRole(Role role) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#saveRole(Role)}
*/
public void updateRole(Role role) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#purgeRole(Role)}
*/
public void deleteRole(Role role) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#savePrivilege(Privilege)}
*/
public void createPrivilege(Privilege privilege) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#savePrivilege(Privilege)}
*/
public void updatePrivilege(Privilege privilege) throws APIException;
/**
* @deprecated see {@link org.openmrs.api.UserService#purgePrivilege(Privilege)}
*/
public void deletePrivilege(Privilege privilege) throws APIException;
/**
* Create a new ConceptClass
*
* @param cc ConceptClass to create
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptClass(ConceptClass)}
*/
public void createConceptClass(ConceptClass cc) throws APIException;
/**
* Update ConceptClass
*
* @param cc ConceptClass to update
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptClass(ConceptClass)}
*/
public void updateConceptClass(ConceptClass cc) throws APIException;
/**
* Delete ConceptClass
*
* @param cc ConceptClass to delete
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#purgeConceptClass(ConceptClass)}
*/
public void deleteConceptClass(ConceptClass cc) throws APIException;
/**
* Create a new ConceptDatatype
*
* @param cd ConceptDatatype to create
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptDatatype(ConceptDatatype)}
*/
public void createConceptDatatype(ConceptDatatype cd) throws APIException;
/**
* Update ConceptDatatype
*
* @param cd ConceptDatatype to update
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptDatatype(ConceptDatatype)}
*/
public void updateConceptDatatype(ConceptDatatype cd) throws APIException;
/**
* Delete ConceptDatatype
*
* @param cd ConceptDatatype to delete
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#purgeConceptDatatype(ConceptDatatype)}
*/
public void deleteConceptDatatype(ConceptDatatype cd) throws APIException;
/**
* Create a new Report
*
* @param report Report to create
* @throws APIException
* @deprecated see reportingcompatibility module
*/
@Deprecated
public void createReport(Report report) throws APIException;
/**
* Update Report
*
* @param report Report to update
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void updateReport(Report report) throws APIException;
/**
* Delete Report
*
* @param report Report to delete
* @throws APIException
* @deprecated see reportingcompatibility module
*/
@Deprecated
public void deleteReport(Report report) throws APIException;
/**
* Create a new Report Object
*
* @param reportObject Report Object to create
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void createReportObject(AbstractReportObject reportObject) throws APIException;
/**
* Update Report Object
*
* @param reportObject the Report Object to update
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void updateReportObject(AbstractReportObject reportObject) throws APIException;
/**
* Delete Report Object
*
* @param reportObjectId Internal identifier for the Report Object to delete
* @deprecated see reportingcompatibility module
* @throws APIException
*/
@Deprecated
public void deleteReportObject(Integer reportObjectId) throws APIException;
/**
* Iterates over the words in names and synonyms (for each locale) and updates the concept word
* business table
*
* @param concept
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptWord(Concept)}
*/
public void updateConceptWord(Concept concept) throws APIException;
/**
* Iterates over all concepts calling updateConceptWord(concept)
*
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptWords()}
*/
public void updateConceptWords() throws APIException;
/**
* Iterates over all concepts with conceptIds between <code>conceptIdStart</code> and
* <code>conceptIdEnd</code> (inclusive) calling updateConceptWord(concept)
*
* @throws APIException
* @deprecated moved to
* {@link org.openmrs.api.ConceptService#updateConceptWords(Integer, Integer)}
*/
public void updateConceptWords(Integer conceptIdStart, Integer conceptIdEnd) throws APIException;
/**
* Updates the concept set derived business table for this concept (bursting the concept sets)
*
* @param concept
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptSetDerived(Concept)};
*/
public void updateConceptSetDerived(Concept concept) throws APIException;
/**
* Iterates over all concepts calling updateConceptSetDerived(concept)
*
* @throws APIException
* @deprecated moved to {@link org.openmrs.api.ConceptService#updateConceptSetDerived()}
*/
public void updateConceptSetDerived() throws APIException;
/**
* Create a concept proposal
*
* @param cp
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptProposal(ConceptProposal)}
*/
public void createConceptProposal(ConceptProposal cp) throws APIException;
/**
* Update a concept proposal
*
* @param cp
* @throws APIException
* @deprecated use {@link org.openmrs.api.ConceptService#saveConceptProposal(ConceptProposal)}
*/
public void updateConceptProposal(ConceptProposal cp) throws APIException;
/**
* maps a concept proposal to a concept
*
* @param cp
* @param mappedConcept
* @throws APIException
* @deprecated moved to
* {@link org.openmrs.api.ConceptService#mapConceptProposalToConcept(ConceptProposal, Concept)}
*/
public void mapConceptProposalToConcept(ConceptProposal cp, Concept mappedConcept) throws APIException;
/**
* rejects a concept proposal
*
* @param cp
* @deprecated moved to
* {@link org.openmrs.api.ConceptService#rejectConceptProposal(ConceptProposal)}
*/
public void rejectConceptProposal(ConceptProposal cp);
/**
* @param site
* @param start
* @param count
* @deprecated use the mrngen module instead
*/
public void mrnGeneratorLog(String site, Integer start, Integer count);
/**
* @deprecated use the mrngen module instead
*/
@Transactional(readOnly = true)
public Collection<?> getMRNGeneratorLog();
/**
* Get a global property by its uuid. There should be only one of these in the database (well,
* in the world actually). If multiple are found, an error is thrown.
*
* @return the global property matching the given uuid
* @should find object given valid uuid
* @should return null if no object found with given uuid
*/
@Transactional(readOnly = true)
public GlobalProperty getGlobalPropertyByUuid(String uuid) throws APIException;
/**
* Get a listing of important variables used in OpenMRS
*
* @return a map from variable name to variable value
* @should return all registered system variables
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.VIEW_ADMIN_FUNCTIONS)
public SortedMap<String, String> getSystemVariables() throws APIException;
/**
* Get a map of all the system information: Java, user, time, runtime properties, etc.
*
* @return a map from variable name to a map of the information
* @should return all system information
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.VIEW_ADMIN_FUNCTIONS)
public Map<String, Map<String, String>> getSystemInformation() throws APIException;
/**
* Gets the global property that has the given <code>propertyName</code>.
* <p>
* If <code>propertyName</code> is not found in the list of Global Properties currently in the
* database, a null value is returned. This method should not have any authorization check.
*
* @param propertyName property key to look for
* @return value of property returned or null if none
* @see #getGlobalProperty(String, String)
* @should not fail with null propertyName
* @should get property value given valid property name
* @should get property in case insensitive way
*/
@Transactional(readOnly = true)
public String getGlobalProperty(String propertyName) throws APIException;
/**
* Gets the global property that has the given <code>propertyName</code>
* <p>
* If propertyName is not found in the list of Global Properties currently in the database, a
* <code>defaultValue</code> is returned
* <p>
* This method should not have any authorization check
*
* @param propertyName property key to look for
* @param defaultValue value to return if propertyName is not found
* @return value of propertyName property or defaultValue if none
* @should return default value if property name does not exist
* @should not fail with null default value
*/
@Transactional(readOnly = true)
public String getGlobalProperty(String propertyName, String defaultValue) throws APIException;
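// Usage sketch (an assumption, not part of the original source): fall back to a
// hard-coded default when the property is missing.
//   String allowedList = Context.getAdministrationService()
//           .getGlobalProperty(OpenmrsConstants.GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST, "en");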
/**
* Gets the global property that has the given <code>propertyName</code>
*
* @param propertyName property key to look for
* @return the global property that matches the given <code>propertyName</code>
* @should return null when no global property match given property name
*/
@Transactional(readOnly = true)
public GlobalProperty getGlobalPropertyObject(String propertyName);
/**
* Gets all global properties that begin with <code>prefix</code>.
*
* @param prefix The beginning of the property name to match.
* @return a <code>List</code> of <code>GlobalProperty</code>s that match <code>prefix</code>
* @since 1.5
* @should return all relevant global properties in the database
*/
@Transactional(readOnly = true)
public List<GlobalProperty> getGlobalPropertiesByPrefix(String prefix);
/**
* Gets all global properties that end with <code>suffix</code>.
*
* @param suffix The end of the property name to match.
* @return a <code>List</code> of <code>GlobalProperty</code>s that match <code>.*suffix</code>
* @since 1.6
* @should return all relevant global properties in the database
*/
@Transactional(readOnly = true)
public List<GlobalProperty> getGlobalPropertiesBySuffix(String suffix);
/**
* Get a list of all global properties in the system
*
* @return list of global properties
* @should return all global properties in the database
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.VIEW_GLOBAL_PROPERTIES)
public List<GlobalProperty> getAllGlobalProperties() throws APIException;
/**
* @deprecated use {@link #getAllGlobalProperties()}
*/
@Transactional(readOnly = true)
public List<GlobalProperty> getGlobalProperties();
/**
* Save the given list of global properties to the database.
*
* @param props list of GlobalProperty objects to save
* @return the saved global properties
* @should save all global properties to the database
* @should not fail with empty list
* @should assign uuid to all new properties
* @should save properties with case difference only
*/
@Authorized(PrivilegeConstants.MANAGE_GLOBAL_PROPERTIES)
public List<GlobalProperty> saveGlobalProperties(List<GlobalProperty> props) throws APIException;
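// Usage sketch (an assumption, not part of the original source): load, tweak and
// re-save properties in a single batch.
//   List<GlobalProperty> props = Context.getAdministrationService().getAllGlobalProperties();
//   // ... modify property values as needed ...
//   Context.getAdministrationService().saveGlobalProperties(props);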
/**
* @deprecated use {@link #saveGlobalProperties(List)}
*/
public void setGlobalProperties(List<GlobalProperty> props);
/**
* Completely remove the given global property from the database
*
* @param globalProperty the global property to delete/remove from the database
* @throws APIException
* @should delete global property from database
*/
@Authorized(PrivilegeConstants.PURGE_GLOBAL_PROPERTIES)
public void purgeGlobalProperty(GlobalProperty globalProperty) throws APIException;
/**
* Completely remove the given global properties from the database
*
* @param globalProperties the global properties to delete/remove from the database
* @throws APIException
* @should delete global properties from database
*/
@Authorized(PrivilegeConstants.PURGE_GLOBAL_PROPERTIES)
public void purgeGlobalProperties(List<GlobalProperty> globalProperties) throws APIException;
/**
* Use
*
* <pre>
* purgeGlobalProperty(new GlobalProperty(propertyName));
* </pre>
*
* @deprecated use {@link #purgeGlobalProperty(GlobalProperty)}
*/
public void deleteGlobalProperty(String propertyName);
/**
* Use
*
* <pre>
* saveGlobalProperty(new GlobalProperty(propertyName, propertyValue));
* </pre>
*
* @deprecated use #saveGlobalProperty(GlobalProperty)
*/
public void setGlobalProperty(String propertyName, String propertyValue);
/**
* Save the given global property to the database
*
* @param gp global property to save
* @return the saved global property
* @throws APIException
* @should create global property in database
* @should overwrite global property if exists
* @should not allow different properties to have the same string with different case
* @should save a global property whose typed value is handled by a custom datatype
*/
@Authorized(PrivilegeConstants.MANAGE_GLOBAL_PROPERTIES)
public GlobalProperty saveGlobalProperty(GlobalProperty gp) throws APIException;
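// Usage sketch (an assumption, not part of the original source; the property name
// "mymodule.enabled" is hypothetical):
//   Context.getAdministrationService()
//           .saveGlobalProperty(new GlobalProperty("mymodule.enabled", "true"));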
/**
* @deprecated use {@link #saveGlobalProperty(GlobalProperty)}
*/
public void setGlobalProperty(GlobalProperty gp);
/**
* @deprecated use {@link #saveGlobalProperty(GlobalProperty)}
*/
public void addGlobalProperty(String propertyName, String propertyValue);
/**
* @deprecated use {@link #saveGlobalProperty(GlobalProperty)}
*/
public void addGlobalProperty(GlobalProperty gp);
/**
* Allows code to be notified when a global property is created/edited/deleted.
*
* @see GlobalPropertyListener
* @param listener The listener to register
*/
public void addGlobalPropertyListener(GlobalPropertyListener listener);
/**
* Removes a GlobalPropertyListener previously registered by
* {@link #addGlobalPropertyListener(GlobalPropertyListener)}
*
* @param listener
*/
public void removeGlobalPropertyListener(GlobalPropertyListener listener);
/**
* Runs the <code>sql</code> on the database. If <code>selectOnly</code> is flagged then any
* non-select sql statements will be rejected.
*
* @param sql
* @param selectOnly
* @return the result rows, each row being a list of column values
* @throws APIException
* @should execute sql containing group by
*/
@Authorized(PrivilegeConstants.SQL_LEVEL_ACCESS)
public List<List<Object>> executeSQL(String sql, boolean selectOnly) throws APIException;
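// Usage sketch (an assumption, not part of the original source; assumes the standard
// global_property table of an OpenMRS database):
//   List<List<Object>> rows = Context.getAdministrationService()
//           .executeSQL("select property, property_value from global_property", true);
//   for (List<Object> row : rows) {
//       Object propertyName = row.get(0); // first selected column
//   }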
/**
* Get the implementation id stored for this server. Returns null if no implementation id has
* been successfully set yet.
*
* @return ImplementationId object that is this implementation's unique id
* @should return null if no implementation id is defined yet
*/
@Transactional(readOnly = true)
@Authorized(PrivilegeConstants.MANAGE_IMPLEMENTATION_ID)
public ImplementationId getImplementationId() throws APIException;
/**
* Set the given <code>implementationId</code> as this implementation's unique id
*
* @param implementationId the ImplementationId to save
* @throws APIException if implementationId is empty or is invalid according to central id
* server
* @should create implementation id in database
* @should overwrite implementation id in database if exists
* @should not fail if given implementationId is null
* @should throw APIException if given empty implementationId object
* @should throw APIException if given a caret in the implementationId code
* @should throw APIException if given a pipe in the implementationId code
* @should set uuid on implementation id global property
*/
@Authorized(PrivilegeConstants.MANAGE_IMPLEMENTATION_ID)
public void setImplementationId(ImplementationId implementationId) throws APIException;
/**
* Gets the list of locales which the administrator has allowed for use on the system. This is
* specified with a global property named
* {@link OpenmrsConstants#GLOBAL_PROPERTY_LOCALE_ALLOWED_LIST}.
*
* @return list of allowed locales
* @should return at least one locale if no locales defined in database yet
* @should not fail if not global property for locales allowed defined yet
* @should not return duplicates even if the global property has them
*/
@Transactional(readOnly = true)
public List<Locale> getAllowedLocales();
/**
* Gets the list of locales for which localized messages are available for the user interface
* (presentation layer). This set includes all the available locales (as indicated by the
* MessageSourceService) filtered by the allowed locales (as indicated by this
* AdministrationService).
*
* @return set of allowed presentation locales (TODO: change this return type to List?)
* @should return at least one locale if no locales defined in database yet
* @should not return more locales than message source service locales
*/
@Transactional(readOnly = true)
public Set<Locale> getPresentationLocales();
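// Relationship sketch (an assumption, not part of the original source): presentation
// locales are the allowed locales for which localized messages actually exist.
//   Set<Locale> ui = Context.getAdministrationService().getPresentationLocales();
//   List<Locale> allowed = Context.getAdministrationService().getAllowedLocales();
//   // every locale in "ui" should also appear in "allowed"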
/**
* Returns a global property according to the type specified
*
* @param <T>
* @param propertyName
* @should get property value in the proper type specified
* @should return default value if property name does not exist
* @return property value in the type of the default value
* @since 1.7
*/
public <T> T getGlobalPropertyValue(String propertyName, T defaultValue) throws APIException;
/**
* @param aClass the class of the object to get the field length for
* @param fieldName name of the field to get the length for
* @return the max field length of a property
*/
public int getMaximumPropertyLength(Class<? extends OpenmrsObject> aClass, String fieldName);
/**
* Performs validation in the manual flush mode to prevent any premature flushes.
* <p>
* Used by {@link ValidateUtil#validate(Object)}.
*
* @see FlushMode
* @since 1.9
* @param object
* @param errors
* @should pass for a valid object
* @should fail for an invalid object
*/
@Transactional(readOnly = true)
public void validate(Object object, Errors errors) throws APIException;
}
|
Back porting to 1.9.x: Unit of work - transaction boundary is not correct - Patient.form maybe more places - TRUNK-229
git-svn-id: 4718f6e240ca7210205b40b658d48b65419e2f31@27231 5bac5841-c719-aa4e-b3fe-cce5062f897a
|
api/src/main/java/org/openmrs/api/AdministrationService.java
|
Back porting to 1.9.x: Unit of work - transaction boundary is not correct - Patient.form maybe more places - TRUNK-229
|
|
Java
|
mpl-2.0
|
79b4df6576990341dd53402e75bb8abde651717a
| 0
|
qhanam/rhino,Angelfirenze/rhino,tuchida/rhino,ashwinrayaprolu1984/rhino,tuchida/rhino,swannodette/rhino,Pilarbrist/rhino,sainaen/rhino,Distrotech/rhino,sam/htmlunit-rhino-fork,AlexTrotsenko/rhino,sainaen/rhino,swannodette/rhino,tntim96/htmlunit-rhino-fork,tuchida/rhino,qhanam/rhino,tuchida/rhino,Pilarbrist/rhino,Angelfirenze/rhino,ashwinrayaprolu1984/rhino,sam/htmlunit-rhino-fork,tntim96/rhino-jscover,tejassaoji/RhinoCoarseTainting,swannodette/rhino,Pilarbrist/rhino,lv7777/egit_test,Angelfirenze/rhino,AlexTrotsenko/rhino,swannodette/rhino,jsdoc3/rhino,ashwinrayaprolu1984/rhino,ashwinrayaprolu1984/rhino,AlexTrotsenko/rhino,Angelfirenze/rhino,lv7777/egit_test,lv7777/egit_test,sam/htmlunit-rhino-fork,lv7777/egit_test,lv7777/egit_test,lv7777/egit_test,swannodette/rhino,ashwinrayaprolu1984/rhino,tuchida/rhino,tejassaoji/RhinoCoarseTainting,Pilarbrist/rhino,sainaen/rhino,jsdoc3/rhino,tntim96/rhino-apigee,ashwinrayaprolu1984/rhino,jsdoc3/rhino,sam/htmlunit-rhino-fork,rasmuserik/rhino,sam/htmlunit-rhino-fork,tejassaoji/RhinoCoarseTainting,sainaen/rhino,qhanam/rhino,tntim96/rhino-apigee,lv7777/egit_test,tntim96/rhino-jscover-repackaged,sainaen/rhino,tuchida/rhino,tejassaoji/RhinoCoarseTainting,AlexTrotsenko/rhino,rasmuserik/rhino,swannodette/rhino,tntim96/rhino-jscover,AlexTrotsenko/rhino,tuchida/rhino,Pilarbrist/rhino,tntim96/rhino-apigee,Angelfirenze/rhino,Pilarbrist/rhino,sainaen/rhino,sam/htmlunit-rhino-fork,Angelfirenze/rhino,sam/htmlunit-rhino-fork,Distrotech/rhino,ashwinrayaprolu1984/rhino,tntim96/htmlunit-rhino-fork,AlexTrotsenko/rhino,sainaen/rhino,AlexTrotsenko/rhino,qhanam/rhino,Pilarbrist/rhino,tejassaoji/RhinoCoarseTainting,InstantWebP2P/rhino-android,tntim96/rhino-jscover-repackaged,tejassaoji/RhinoCoarseTainting,tejassaoji/RhinoCoarseTainting,InstantWebP2P/rhino-android,Angelfirenze/rhino,swannodette/rhino
|
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Norris Boyd
* Igor Bukanov
* Ethan Hugg
* Bob Jervis
* Terry Lucas
* Milen Nankov
* Steve Yegge
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package org.mozilla.javascript;
import org.mozilla.javascript.ast.*;
import java.util.List;
import java.util.ArrayList;
/**
* This class rewrites the parse tree into an IR suitable for codegen.
*
* @see Node
* @author Mike McCabe
* @author Norris Boyd
*/
public final class IRFactory extends Parser
{
private static final int LOOP_DO_WHILE = 0;
private static final int LOOP_WHILE = 1;
private static final int LOOP_FOR = 2;
private static final int ALWAYS_TRUE_BOOLEAN = 1;
private static final int ALWAYS_FALSE_BOOLEAN = -1;
private Decompiler decompiler = new Decompiler();
public IRFactory() {
super();
}
public IRFactory(CompilerEnvirons env) {
this(env, env.getErrorReporter());
}
public IRFactory(CompilerEnvirons env, ErrorReporter errorReporter) {
super(env, errorReporter);
}
/**
* Transforms the tree into a lower-level IR suitable for codegen.
* Optionally generates the encoded source.
*/
public ScriptNode transformTree(AstRoot root) {
currentScriptOrFn = root;
int sourceStartOffset = decompiler.getCurrentOffset();
if (Token.printTrees) {
System.out.println("IRFactory.transformTree");
System.out.println(root.debugPrint());
}
ScriptNode script = (ScriptNode)transform(root);
int sourceEndOffset = decompiler.getCurrentOffset();
script.setEncodedSourceBounds(sourceStartOffset,
sourceEndOffset);
if (compilerEnv.isGeneratingSource()) {
script.setEncodedSource(decompiler.getEncodedSource());
}
decompiler = null;
return script;
}
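// Illustrative driving sketch (an assumption, not part of this file; "source" and
// "sourceName" are placeholders): a typical caller parses first, then transforms.
//   CompilerEnvirons env = new CompilerEnvirons();
//   AstRoot root = new Parser(env).parse(source, sourceName, 1);
//   ScriptNode tree = new IRFactory(env).transformTree(root);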
// Might want to convert this to polymorphism - move transform*
// functions into the AstNode subclasses. OTOH that would make
// IR transformation part of the public AST API - desirable?
// Another possibility: create AstTransformer interface and adapter.
public Node transform(AstNode node) {
switch (node.getType()) {
case Token.ARRAYCOMP:
return transformArrayComp((ArrayComprehension)node);
case Token.ARRAYLIT:
return transformArrayLiteral((ArrayLiteral)node);
case Token.BLOCK:
return transformBlock(node);
case Token.BREAK:
return transformBreak((BreakStatement)node);
case Token.CALL:
return transformFunctionCall((FunctionCall)node);
case Token.CONTINUE:
return transformContinue((ContinueStatement)node);
case Token.DO:
return transformDoLoop((DoLoop)node);
case Token.EMPTY:
return node;
case Token.FOR:
if (node instanceof ForInLoop) {
return transformForInLoop((ForInLoop)node);
} else {
return transformForLoop((ForLoop)node);
}
case Token.FUNCTION:
return transformFunction((FunctionNode)node);
case Token.GETELEM:
return transformElementGet((ElementGet)node);
case Token.GETPROP:
return transformPropertyGet((PropertyGet)node);
case Token.HOOK:
return transformCondExpr((ConditionalExpression)node);
case Token.IF:
return transformIf((IfStatement)node);
case Token.TRUE:
case Token.FALSE:
case Token.THIS:
case Token.NULL:
case Token.DEBUGGER:
return transformLiteral(node);
case Token.NAME:
return transformName((Name)node);
case Token.NUMBER:
return transformNumber((NumberLiteral)node);
case Token.NEW:
return transformNewExpr((NewExpression)node);
case Token.OBJECTLIT:
return transformObjectLiteral((ObjectLiteral)node);
case Token.REGEXP:
return transformRegExp((RegExpLiteral)node);
case Token.RETURN:
return transformReturn((ReturnStatement)node);
case Token.SCRIPT:
return transformScript((ScriptNode)node);
case Token.STRING:
return transformString((StringLiteral)node);
case Token.SWITCH:
return transformSwitch((SwitchStatement)node);
case Token.THROW:
return transformThrow((ThrowStatement)node);
case Token.TRY:
return transformTry((TryStatement)node);
case Token.WHILE:
return transformWhileLoop((WhileLoop)node);
case Token.WITH:
return transformWith((WithStatement)node);
case Token.YIELD:
return transformYield((Yield)node);
default:
if (node instanceof ExpressionStatement) {
return transformExprStmt((ExpressionStatement)node);
}
if (node instanceof Assignment) {
return transformAssignment((Assignment)node);
}
if (node instanceof UnaryExpression) {
return transformUnary((UnaryExpression)node);
}
if (node instanceof XmlMemberGet) {
return transformXmlMemberGet((XmlMemberGet)node);
}
if (node instanceof InfixExpression) {
return transformInfix((InfixExpression)node);
}
if (node instanceof VariableDeclaration) {
return transformVariables((VariableDeclaration)node);
}
if (node instanceof ParenthesizedExpression) {
return transformParenExpr((ParenthesizedExpression)node);
}
if (node instanceof LabeledStatement) {
return transformLabeledStatement((LabeledStatement)node);
}
if (node instanceof LetNode) {
return transformLetNode((LetNode)node);
}
if (node instanceof XmlRef) {
return transformXmlRef((XmlRef)node);
}
if (node instanceof XmlLiteral) {
return transformXmlLiteral((XmlLiteral)node);
}
throw new IllegalArgumentException("Can't transform: " + node);
}
}
private Node transformArrayComp(ArrayComprehension node) {
// An array comprehension expression such as
//
// [expr for (x in foo) for each ([y, z] in bar) if (cond)]
//
// is rewritten approximately as
//
// new Scope(ARRAYCOMP) {
// new Node(BLOCK) {
// let tmp1 = new Array;
// for (let x in foo) {
// for each (let tmp2 in bar) {
// if (cond) {
// tmp1.push([y, z] = tmp2, expr);
// }
// }
// }
// }
// createName(tmp1)
// }
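// Concretely (an illustrative assumption, not from the original comment): an input like
//   [x * 2 for (x in obj)]
// ends up roughly as
//   { let tmp1 = new Array; for (let x in obj) { tmp1.push(x * 2); } tmp1 }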
int lineno = node.getLineno();
Scope scopeNode = createScopeNode(Token.ARRAYCOMP, lineno);
String arrayName = currentScriptOrFn.getNextTempName();
pushScope(scopeNode);
try {
defineSymbol(Token.LET, arrayName, false);
Node block = new Node(Token.BLOCK, lineno);
Node newArray = createCallOrNew(Token.NEW, createName("Array"));
Node init = new Node(Token.EXPR_VOID,
createAssignment(Token.ASSIGN,
createName(arrayName),
newArray),
lineno);
block.addChildToBack(init);
block.addChildToBack(arrayCompTransformHelper(node, arrayName));
scopeNode.addChildToBack(block);
scopeNode.addChildToBack(createName(arrayName));
return scopeNode;
} finally {
popScope();
}
}
private Node arrayCompTransformHelper(ArrayComprehension node,
String arrayName) {
decompiler.addToken(Token.LB);
int lineno = node.getLineno();
Node expr = transform(node.getResult());
List<ArrayComprehensionLoop> loops = node.getLoops();
int numLoops = loops.size();
// Walk through loops, collecting and defining their iterator symbols.
Node[] iterators = new Node[numLoops];
Node[] iteratedObjs = new Node[numLoops];
for (int i = 0; i < numLoops; i++) {
ArrayComprehensionLoop acl = loops.get(i);
decompiler.addName(" ");
decompiler.addToken(Token.FOR);
if (acl.isForEach()) {
decompiler.addName("each ");
}
decompiler.addToken(Token.LP);
AstNode iter = acl.getIterator();
String name = null;
if (iter.getType() == Token.NAME) {
name = iter.getString();
decompiler.addName(name);
} else {
// destructuring assignment
decompile(iter);
name = currentScriptOrFn.getNextTempName();
defineSymbol(Token.LP, name, false);
expr = createBinary(Token.COMMA,
createAssignment(Token.ASSIGN,
iter,
createName(name)),
expr);
}
Node init = createName(name);
// Define as a let since we want the scope of the variable to
// be restricted to the array comprehension
defineSymbol(Token.LET, name, false);
iterators[i] = init;
decompiler.addToken(Token.IN);
iteratedObjs[i] = transform(acl.getIteratedObject());
decompiler.addToken(Token.RP);
}
// generate code for tmpArray.push(body)
Node call = createCallOrNew(Token.CALL,
createPropertyGet(createName(arrayName),
null,
"push", 0));
Node body = new Node(Token.EXPR_VOID, call, lineno);
if (node.getFilter() != null) {
decompiler.addName(" ");
decompiler.addToken(Token.IF);
decompiler.addToken(Token.LP);
body = createIf(transform(node.getFilter()), body, null, lineno);
decompiler.addToken(Token.RP);
}
// Now walk loops in reverse to build up the body statement.
int pushed = 0;
try {
for (int i = numLoops-1; i >= 0; i--) {
ArrayComprehensionLoop acl = loops.get(i);
Scope loop = createLoopNode(null, // no label
acl.getLineno());
pushScope(loop);
pushed++;
body = createForIn(Token.LET,
loop,
iterators[i],
iteratedObjs[i],
body,
acl.isForEach());
}
} finally {
for (int i = 0; i < pushed; i++) {
popScope();
}
}
decompiler.addToken(Token.RB);
// Now that we've accumulated any destructuring forms,
// add expr to the call node; it's pushed on each iteration.
call.addChildToBack(expr);
return body;
}
private Node transformArrayLiteral(ArrayLiteral node) {
if (node.isDestructuring()) {
return node;
}
decompiler.addToken(Token.LB);
List<AstNode> elems = node.getElements();
Node array = new Node(Token.ARRAYLIT);
List<Integer> skipIndexes = null;
for (int i = 0; i < elems.size(); ++i) {
AstNode elem = elems.get(i);
if (elem.getType() != Token.EMPTY) {
array.addChildToBack(transform(elem));
} else {
if (skipIndexes == null) {
skipIndexes = new ArrayList<Integer>();
}
skipIndexes.add(i);
}
if (i < elems.size() - 1)
decompiler.addToken(Token.COMMA);
}
decompiler.addToken(Token.RB);
array.putIntProp(Node.DESTRUCTURING_ARRAY_LENGTH,
node.getDestructuringLength());
if (skipIndexes != null) {
int[] skips = new int[skipIndexes.size()];
for (int i = 0; i < skipIndexes.size(); i++)
skips[i] = skipIndexes.get(i);
array.putProp(Node.SKIP_INDEXES_PROP, skips);
}
return array;
}
private Node transformAssignment(Assignment node) {
AstNode left = removeParens(node.getLeft());
Node target = null;
if (isDestructuring(left)) {
decompile(left);
target = left;
} else {
target = transform(left);
}
decompiler.addToken(node.getType());
return createAssignment(node.getType(),
target,
transform(node.getRight()));
}
private Node transformBlock(AstNode node) {
if (node instanceof Scope) {
pushScope((Scope)node);
}
try {
List<Node> kids = new ArrayList<Node>();
for (Node kid : node) {
kids.add(transform((AstNode)kid));
}
node.removeChildren();
for (Node kid : kids) {
node.addChildToBack(kid);
}
return node;
} finally {
if (node instanceof Scope) {
popScope();
}
}
}
private Node transformBreak(BreakStatement node) {
decompiler.addToken(Token.BREAK);
if (node.getBreakLabel() != null) {
decompiler.addName(node.getBreakLabel().getIdentifier());
}
decompiler.addEOL(Token.SEMI);
return node;
}
private Node transformCondExpr(ConditionalExpression node) {
Node test = transform(node.getTestExpression());
decompiler.addToken(Token.HOOK);
Node ifTrue = transform(node.getTrueExpression());
decompiler.addToken(Token.COLON);
Node ifFalse = transform(node.getFalseExpression());
return createCondExpr(test, ifTrue, ifFalse);
}
private Node transformContinue(ContinueStatement node) {
decompiler.addToken(Token.CONTINUE);
if (node.getLabel() != null) {
decompiler.addName(node.getLabel().getIdentifier());
}
decompiler.addEOL(Token.SEMI);
return node;
}
private Node transformDoLoop(DoLoop loop) {
loop.setType(Token.LOOP);
pushScope(loop);
try {
decompiler.addToken(Token.DO);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addToken(Token.RC);
decompiler.addToken(Token.WHILE);
decompiler.addToken(Token.LP);
Node cond = transform(loop.getCondition());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.SEMI);
return createLoop(loop, LOOP_DO_WHILE,
body, cond, null, null);
} finally {
popScope();
}
}
private Node transformElementGet(ElementGet node) {
// OPT: could optimize to createPropertyGet
// iff elem is a string that cannot be a number
Node target = transform(node.getTarget());
decompiler.addToken(Token.LB);
Node element = transform(node.getElement());
decompiler.addToken(Token.RB);
return new Node(Token.GETELEM, target, element);
}
private Node transformExprStmt(ExpressionStatement node) {
Node expr = transform(node.getExpression());
decompiler.addEOL(Token.SEMI);
return new Node(node.getType(), expr, node.getLineno());
}
private Node transformForInLoop(ForInLoop loop) {
decompiler.addToken(Token.FOR);
if (loop.isForEach())
decompiler.addName("each ");
decompiler.addToken(Token.LP);
loop.setType(Token.LOOP);
pushScope(loop);
try {
int declType = -1;
AstNode iter = loop.getIterator();
if (iter instanceof VariableDeclaration) {
declType = ((VariableDeclaration)iter).getType();
}
Node lhs = transform(iter);
decompiler.addToken(Token.IN);
Node obj = transform(loop.getIteratedObject());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addEOL(Token.RC);
return createForIn(declType, loop, lhs, obj, body,
loop.isForEach());
} finally {
popScope();
}
}
private Node transformForLoop(ForLoop loop) {
decompiler.addToken(Token.FOR);
decompiler.addToken(Token.LP);
loop.setType(Token.LOOP);
// XXX: Can't use pushScope/popScope here since 'createFor' may split
// the scope
Scope savedScope = currentScope;
currentScope = loop;
try {
Node init = transform(loop.getInitializer());
decompiler.addToken(Token.SEMI);
Node test = transform(loop.getCondition());
decompiler.addToken(Token.SEMI);
Node incr = transform(loop.getIncrement());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addEOL(Token.RC);
return createFor(loop, init, test, incr, body);
} finally {
currentScope = savedScope;
}
}
private Node transformFunction(FunctionNode fn) {
int functionType = fn.getFunctionType();
int start = decompiler.markFunctionStart(functionType);
Node mexpr = decompileFunctionHeader(fn);
int index = currentScriptOrFn.addFunction(fn);
PerFunctionVariables savedVars = new PerFunctionVariables(fn);
try {
// If we start needing to record much more codegen metadata during
// function parsing, we should lump it all into a helper class.
Node destructuring = (Node)fn.getProp(Node.DESTRUCTURING_PARAMS);
fn.removeProp(Node.DESTRUCTURING_PARAMS);
int lineno = fn.getBody().getLineno();
++nestingOfFunction; // only for body, not params
Node body = transform(fn.getBody());
if (!fn.isExpressionClosure()) {
decompiler.addToken(Token.RC);
}
fn.setEncodedSourceBounds(start, decompiler.markFunctionEnd(start));
if (functionType != FunctionNode.FUNCTION_EXPRESSION && !fn.isExpressionClosure()) {
// Add EOL only if function is not part of expression
// since it gets SEMI + EOL from Statement in that case
decompiler.addToken(Token.EOL);
}
if (destructuring != null) {
body.addChildToFront(new Node(Token.EXPR_VOID,
destructuring, lineno));
}
int syntheticType = fn.getFunctionType();
Node pn = initFunction(fn, index, body, syntheticType);
if (mexpr != null) {
pn = createAssignment(Token.ASSIGN, mexpr, pn);
if (syntheticType != FunctionNode.FUNCTION_EXPRESSION) {
pn = createExprStatementNoReturn(pn, fn.getLineno());
}
}
return pn;
} finally {
--nestingOfFunction;
savedVars.restore();
}
}
private Node transformFunctionCall(FunctionCall node) {
Node call = createCallOrNew(Token.CALL, transform(node.getTarget()));
call.setLineno(node.getLineno());
decompiler.addToken(Token.LP);
List<AstNode> args = node.getArguments();
for (int i = 0; i < args.size(); i++) {
AstNode arg = args.get(i);
call.addChildToBack(transform(arg));
if (i < args.size() - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RP);
return call;
}
private Node transformIf(IfStatement n) {
decompiler.addToken(Token.IF);
decompiler.addToken(Token.LP);
Node cond = transform(n.getCondition());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node ifTrue = transform(n.getThenPart());
Node ifFalse = null;
if (n.getElsePart() != null) {
decompiler.addToken(Token.RC);
decompiler.addToken(Token.ELSE);
decompiler.addEOL(Token.LC);
ifFalse = transform(n.getElsePart());
}
decompiler.addEOL(Token.RC);
return createIf(cond, ifTrue, ifFalse, n.getLineno());
}
private Node transformInfix(InfixExpression node) {
Node left = transform(node.getLeft());
decompiler.addToken(node.getType());
Node right = transform(node.getRight());
if (node instanceof XmlDotQuery) {
decompiler.addToken(Token.RP);
}
return createBinary(node.getType(), left, right);
}
private Node transformLabeledStatement(LabeledStatement ls) {
for (Label lb : ls.getLabels()) {
decompiler.addName(lb.getName());
decompiler.addEOL(Token.COLON);
}
Label label = ls.getFirstLabel();
Node statement = transform(ls.getStatement());
// Make a target and put it _after_ the statement node. Add in the
// LABEL node, so breaks get the right target.
Node breakTarget = Node.newTarget();
Node block = new Node(Token.BLOCK, label, statement, breakTarget);
label.target = breakTarget;
return block;
}
private Node transformLetNode(LetNode node) {
pushScope(node);
try {
decompiler.addToken(Token.LET);
decompiler.addToken(Token.LP);
Node vars = transformVariableInitializers(node.getVariables());
decompiler.addToken(Token.RP);
node.addChildToBack(vars);
boolean letExpr = node.getType() == Token.LETEXPR;
if (node.getBody() != null) {
if (letExpr) {
decompiler.addName(" ");
} else {
decompiler.addEOL(Token.LC);
}
node.addChildToBack(transform(node.getBody()));
if (!letExpr) {
decompiler.addEOL(Token.RC);
}
}
return node;
} finally {
popScope();
}
}
private Node transformLiteral(AstNode node) {
decompiler.addToken(node.getType());
return node;
}
private Node transformName(Name node) {
decompiler.addName(node.getIdentifier());
return node;
}
private Node transformNewExpr(NewExpression node) {
decompiler.addToken(Token.NEW);
Node nx = createCallOrNew(Token.NEW, transform(node.getTarget()));
nx.setLineno(node.getLineno());
List<AstNode> args = node.getArguments();
if (!args.isEmpty())
decompiler.addToken(Token.LP);
for (int i = 0; i < args.size(); i++) {
AstNode arg = args.get(i);
nx.addChildToBack(transform(arg));
if (i < args.size() - 1) {
decompiler.addToken(Token.COMMA);
}
}
if (!args.isEmpty())
decompiler.addToken(Token.RP);
if (node.getInitializer() != null) {
nx.addChildToBack(transformObjectLiteral(node.getInitializer()));
}
return nx;
}
private Node transformNumber(NumberLiteral node) {
decompiler.addNumber(node.getNumber());
return node;
}
private Node transformObjectLiteral(ObjectLiteral node) {
if (node.isDestructuring()) {
return node;
}
// createObjectLiteral rewrites its argument as object
// creation plus object property entries, so later compiler
// stages don't need to know about object literals.
decompiler.addToken(Token.LC);
List<ObjectProperty> elems = node.getElements();
Node object = new Node(Token.OBJECTLIT);
Object[] properties;
if (elems.isEmpty()) {
properties = ScriptRuntime.emptyArgs;
} else {
int size = elems.size(), i = 0;
properties = new Object[size];
for (ObjectProperty prop : elems) {
if (prop.isGetter()) {
decompiler.addToken(Token.GET);
} else if (prop.isSetter()) {
decompiler.addToken(Token.SET);
}
properties[i++] = getPropKey(prop.getLeft());
// OBJECTLIT is used as ':' in object literal for
// decompilation to solve spacing ambiguity.
if (!(prop.isGetter() || prop.isSetter())) {
decompiler.addToken(Token.OBJECTLIT);
}
Node right = transform(prop.getRight());
if (prop.isGetter()) {
right = createUnary(Token.GET, right);
} else if (prop.isSetter()) {
right = createUnary(Token.SET, right);
}
object.addChildToBack(right);
if (i < size) {
decompiler.addToken(Token.COMMA);
}
}
}
decompiler.addToken(Token.RC);
object.putProp(Node.OBJECT_IDS_PROP, properties);
return object;
}
private Object getPropKey(Node id) {
Object key;
if (id instanceof Name) {
String s = ((Name)id).getIdentifier();
decompiler.addName(s);
key = ScriptRuntime.getIndexObject(s);
} else if (id instanceof StringLiteral) {
String s = ((StringLiteral)id).getValue();
decompiler.addString(s);
key = ScriptRuntime.getIndexObject(s);
} else if (id instanceof NumberLiteral) {
double n = ((NumberLiteral)id).getNumber();
decompiler.addNumber(n);
key = ScriptRuntime.getIndexObject(n);
} else {
throw Kit.codeBug();
}
return key;
}
private Node transformParenExpr(ParenthesizedExpression node) {
AstNode expr = node.getExpression();
decompiler.addToken(Token.LP);
int count = 1;
while (expr instanceof ParenthesizedExpression) {
decompiler.addToken(Token.LP);
count++;
expr = ((ParenthesizedExpression)expr).getExpression();
}
Node result = transform(expr);
for (int i = 0; i < count; i++) {
decompiler.addToken(Token.RP);
}
result.putProp(Node.PARENTHESIZED_PROP, Boolean.TRUE);
return result;
}
private Node transformPropertyGet(PropertyGet node) {
Node target = transform(node.getTarget());
String name = node.getProperty().getIdentifier();
decompiler.addToken(Token.DOT);
decompiler.addName(name);
return createPropertyGet(target, null, name, 0);
}
private Node transformRegExp(RegExpLiteral node) {
decompiler.addRegexp(node.getValue(), node.getFlags());
currentScriptOrFn.addRegExp(node);
return node;
}
private Node transformReturn(ReturnStatement node) {
if (Boolean.TRUE.equals(node.getProp(Node.EXPRESSION_CLOSURE_PROP))) {
decompiler.addName(" ");
} else {
decompiler.addToken(Token.RETURN);
}
AstNode rv = node.getReturnValue();
Node value = rv == null ? null : transform(rv);
decompiler.addEOL(Token.SEMI);
return rv == null
? new Node(Token.RETURN, node.getLineno())
: new Node(Token.RETURN, value, node.getLineno());
}
private Node transformScript(ScriptNode node) {
decompiler.addToken(Token.SCRIPT);
if (currentScope != null) Kit.codeBug();
currentScope = node;
Node body = new Node(Token.BLOCK);
for (Node kid : node) {
body.addChildToBack(transform((AstNode)kid));
}
node.removeChildren();
Node children = body.getFirstChild();
if (children != null) {
node.addChildrenToBack(children);
}
return node;
}
private Node transformString(StringLiteral node) {
decompiler.addString(node.getValue());
return Node.newString(node.getValue());
}
private Node transformSwitch(SwitchStatement node) {
// The switch will be rewritten from:
//
// switch (expr) {
// case test1: statements1;
// ...
// default: statementsDefault;
// ...
// case testN: statementsN;
// }
//
// to:
//
// {
// switch (expr) {
// case test1: goto label1;
// ...
// case testN: goto labelN;
// }
// goto labelDefault;
// label1:
// statements1;
// ...
// labelDefault:
// statementsDefault;
// ...
// labelN:
// statementsN;
// breakLabel:
// }
//
// where inside switch each "break;" without label will be replaced
// by "goto breakLabel".
//
// If the original switch does not have a default label, then
// after the switch the transformed code would contain this goto:
// goto breakLabel;
// instead of:
// goto labelDefault;
decompiler.addToken(Token.SWITCH);
decompiler.addToken(Token.LP);
Node switchExpr = transform(node.getExpression());
decompiler.addToken(Token.RP);
node.addChildToBack(switchExpr);
Node block = new Node(Token.BLOCK, node, node.getLineno());
decompiler.addEOL(Token.LC);
for (SwitchCase sc : node.getCases()) {
AstNode expr = sc.getExpression();
Node caseExpr = null;
if (expr != null) {
decompiler.addToken(Token.CASE);
caseExpr = transform(expr);
} else {
decompiler.addToken(Token.DEFAULT);
}
decompiler.addEOL(Token.COLON);
List<AstNode> stmts = sc.getStatements();
Node body = new Block();
if (stmts != null) {
for (AstNode kid : stmts) {
body.addChildToBack(transform(kid));
}
}
addSwitchCase(block, caseExpr, body);
}
decompiler.addEOL(Token.RC);
closeSwitch(block);
return block;
}
private Node transformThrow(ThrowStatement node) {
decompiler.addToken(Token.THROW);
Node value = transform(node.getExpression());
decompiler.addEOL(Token.SEMI);
return new Node(Token.THROW, value, node.getLineno());
}
private Node transformTry(TryStatement node) {
decompiler.addToken(Token.TRY);
decompiler.addEOL(Token.LC);
Node tryBlock = transform(node.getTryBlock());
decompiler.addEOL(Token.RC);
Node catchBlocks = new Block();
for (CatchClause cc : node.getCatchClauses()) {
decompiler.addToken(Token.CATCH);
decompiler.addToken(Token.LP);
String varName = cc.getVarName().getIdentifier();
decompiler.addName(varName);
Node catchCond = null;
AstNode ccc = cc.getCatchCondition();
if (ccc != null) {
decompiler.addName(" ");
decompiler.addToken(Token.IF);
catchCond = transform(ccc);
} else {
catchCond = new EmptyExpression();
}
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(cc.getBody());
decompiler.addEOL(Token.RC);
catchBlocks.addChildToBack(createCatch(varName, catchCond,
body, cc.getLineno()));
}
Node finallyBlock = null;
if (node.getFinallyBlock() != null) {
decompiler.addToken(Token.FINALLY);
decompiler.addEOL(Token.LC);
finallyBlock = transform(node.getFinallyBlock());
decompiler.addEOL(Token.RC);
}
return createTryCatchFinally(tryBlock, catchBlocks,
finallyBlock, node.getLineno());
}
private Node transformUnary(UnaryExpression node) {
int type = node.getType();
if (type == Token.DEFAULTNAMESPACE) {
return transformDefaultXmlNamepace(node);
}
if (node.isPrefix()) {
decompiler.addToken(type);
}
Node child = transform(node.getOperand());
if (node.isPostfix()) {
decompiler.addToken(type);
}
if (type == Token.INC || type == Token.DEC) {
return createIncDec(type, node.isPostfix(), child);
}
return createUnary(type, child);
}
private Node transformVariables(VariableDeclaration node) {
decompiler.addToken(node.getType());
transformVariableInitializers(node);
// Might be most robust to have parser record whether it was
// a variable declaration statement, possibly as a node property.
AstNode parent = node.getParent();
if (!(parent instanceof Loop)
&& !(parent instanceof LetNode)) {
decompiler.addEOL(Token.SEMI);
}
return node;
}
private Node transformVariableInitializers(VariableDeclaration node) {
List<VariableInitializer> vars = node.getVariables();
int size = vars.size(), i = 0;
for (VariableInitializer var : vars) {
AstNode target = var.getTarget();
AstNode init = var.getInitializer();
Node left = null;
if (var.isDestructuring()) {
decompile(target); // decompile but don't transform
left = target;
} else {
left = transform(target);
}
Node right = null;
if (init != null) {
decompiler.addToken(Token.ASSIGN);
right = transform(init);
}
if (var.isDestructuring()) {
if (right == null) { // TODO: should this ever happen?
node.addChildToBack(left);
} else {
Node d = createDestructuringAssignment(node.getType(),
left, right);
node.addChildToBack(d);
}
} else {
if (right != null) {
left.addChildToBack(right);
}
node.addChildToBack(left);
}
if (i++ < size-1) {
decompiler.addToken(Token.COMMA);
}
}
return node;
}
private Node transformWhileLoop(WhileLoop loop) {
decompiler.addToken(Token.WHILE);
loop.setType(Token.LOOP);
pushScope(loop);
try {
decompiler.addToken(Token.LP);
Node cond = transform(loop.getCondition());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addEOL(Token.RC);
return createLoop(loop, LOOP_WHILE, body, cond, null, null);
} finally {
popScope();
}
}
private Node transformWith(WithStatement node) {
decompiler.addToken(Token.WITH);
decompiler.addToken(Token.LP);
Node expr = transform(node.getExpression());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node stmt = transform(node.getStatement());
decompiler.addEOL(Token.RC);
return createWith(expr, stmt, node.getLineno());
}
private Node transformYield(Yield node) {
decompiler.addToken(Token.YIELD);
Node kid = node.getValue() == null ? null : transform(node.getValue());
if (kid != null)
return new Node(Token.YIELD, kid, node.getLineno());
else
return new Node(Token.YIELD, node.getLineno());
}
private Node transformXmlLiteral(XmlLiteral node) {
// a literal like <foo>{bar}</foo> is rewritten as
// new XML("<foo>" + bar + "</foo>");
Node pnXML = new Node(Token.NEW, node.getLineno());
List<XmlFragment> frags = node.getFragments();
XmlString first = (XmlString)frags.get(0);
boolean anon = first.getXml().trim().startsWith("<>");
pnXML.addChildToBack(createName(anon ? "XMLList" : "XML"));
Node pn = null;
for (XmlFragment frag : frags) {
if (frag instanceof XmlString) {
String xml = ((XmlString)frag).getXml();
decompiler.addName(xml);
if (pn == null) {
pn = createString(xml);
} else {
pn = createBinary(Token.ADD, pn, createString(xml));
}
} else {
XmlExpression xexpr = (XmlExpression)frag;
boolean isXmlAttr = xexpr.isXmlAttribute();
Node expr;
decompiler.addToken(Token.LC);
if (xexpr.getExpression() instanceof EmptyExpression) {
expr = createString("");
} else {
expr = transform(xexpr.getExpression());
}
decompiler.addToken(Token.RC);
if (isXmlAttr) {
// Need to put the result in double quotes
expr = createUnary(Token.ESCXMLATTR, expr);
Node prepend = createBinary(Token.ADD,
createString("\""),
expr);
expr = createBinary(Token.ADD,
prepend,
createString("\""));
} else {
expr = createUnary(Token.ESCXMLTEXT, expr);
}
pn = createBinary(Token.ADD, pn, expr);
}
}
pnXML.addChildToBack(pn);
return pnXML;
}
private Node transformXmlMemberGet(XmlMemberGet node) {
XmlRef ref = node.getMemberRef();
Node pn = transform(node.getLeft());
int flags = ref.isAttributeAccess() ? Node.ATTRIBUTE_FLAG : 0;
if (node.getType() == Token.DOTDOT) {
flags |= Node.DESCENDANTS_FLAG;
decompiler.addToken(Token.DOTDOT);
} else {
decompiler.addToken(Token.DOT);
}
return transformXmlRef(pn, ref, flags);
}
// We get here if we weren't a child of a . or .. infix node
private Node transformXmlRef(XmlRef node) {
int memberTypeFlags = node.isAttributeAccess()
? Node.ATTRIBUTE_FLAG : 0;
return transformXmlRef(null, node, memberTypeFlags);
}
private Node transformXmlRef(Node pn, XmlRef node, int memberTypeFlags) {
if ((memberTypeFlags & Node.ATTRIBUTE_FLAG) != 0)
decompiler.addToken(Token.XMLATTR);
Name namespace = node.getNamespace();
String ns = namespace != null ? namespace.getIdentifier() : null;
if (ns != null) {
decompiler.addName(ns);
decompiler.addToken(Token.COLONCOLON);
}
if (node instanceof XmlPropRef) {
String name = ((XmlPropRef)node).getPropName().getIdentifier();
decompiler.addName(name);
return createPropertyGet(pn, ns, name, memberTypeFlags);
} else {
decompiler.addToken(Token.LB);
Node expr = transform(((XmlElemRef)node).getExpression());
decompiler.addToken(Token.RB);
return createElementGet(pn, ns, expr, memberTypeFlags);
}
}
private Node transformDefaultXmlNamepace(UnaryExpression node) {
decompiler.addToken(Token.DEFAULT);
decompiler.addName(" xml");
decompiler.addName(" namespace");
decompiler.addToken(Token.ASSIGN);
Node child = transform(node.getOperand());
return createUnary(Token.DEFAULTNAMESPACE, child);
}
/**
* If caseExpression argument is null it indicates a default label.
*/
private void addSwitchCase(Node switchBlock, Node caseExpression,
Node statements)
{
if (switchBlock.getType() != Token.BLOCK) throw Kit.codeBug();
Jump switchNode = (Jump)switchBlock.getFirstChild();
if (switchNode.getType() != Token.SWITCH) throw Kit.codeBug();
Node gotoTarget = Node.newTarget();
if (caseExpression != null) {
Jump caseNode = new Jump(Token.CASE, caseExpression);
caseNode.target = gotoTarget;
switchNode.addChildToBack(caseNode);
} else {
switchNode.setDefault(gotoTarget);
}
switchBlock.addChildToBack(gotoTarget);
switchBlock.addChildToBack(statements);
}
private void closeSwitch(Node switchBlock)
{
if (switchBlock.getType() != Token.BLOCK) throw Kit.codeBug();
Jump switchNode = (Jump)switchBlock.getFirstChild();
if (switchNode.getType() != Token.SWITCH) throw Kit.codeBug();
Node switchBreakTarget = Node.newTarget();
// switchNode.target is only used by NodeTransformer
// to detect switch end
switchNode.target = switchBreakTarget;
Node defaultTarget = switchNode.getDefault();
if (defaultTarget == null) {
defaultTarget = switchBreakTarget;
}
switchBlock.addChildAfter(makeJump(Token.GOTO, defaultTarget),
switchNode);
switchBlock.addChildToBack(switchBreakTarget);
}
private Node createExprStatementNoReturn(Node expr, int lineno) {
return new Node(Token.EXPR_VOID, expr, lineno);
}
private Node createString(String string) {
return Node.newString(string);
}
/**
* Catch clause of try/catch/finally
* @param varName the name of the variable to bind to the exception
* @param catchCond the condition under which to catch the exception.
* May be null if no condition is given.
* @param stmts the statements in the catch clause
* @param lineno the starting line number of the catch clause
*/
private Node createCatch(String varName, Node catchCond, Node stmts,
int lineno) {
if (catchCond == null) {
catchCond = new Node(Token.EMPTY);
}
return new Node(Token.CATCH, createName(varName),
catchCond, stmts, lineno);
}
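// Attaches the transformed body to the FunctionNode, binds a function
// expression's name as a local, appends an implicit RETURN when needed,
// and returns a FUNCTION node carrying the function index.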
private Node initFunction(FunctionNode fnNode, int functionIndex,
Node statements, int functionType) {
fnNode.setFunctionType(functionType);
fnNode.addChildToBack(statements);
int functionCount = fnNode.getFunctionCount();
if (functionCount != 0) {
// Functions containing other functions require activation objects
fnNode.setRequiresActivation();
}
if (functionType == FunctionNode.FUNCTION_EXPRESSION) {
Name name = fnNode.getFunctionName();
if (name != null && name.length() != 0) {
// A function expression needs to have its name as a
// variable (if it isn't already allocated as a variable).
// See ECMA Ch. 13. We add code to the beginning of the
// function to initialize a local variable of the
// function's name to the function value.
Node setFn = new Node(Token.EXPR_VOID,
new Node(Token.SETNAME,
Node.newString(Token.BINDNAME,
name.getIdentifier()),
new Node(Token.THISFN)));
statements.addChildrenToFront(setFn);
}
}
// Add return to end if needed.
Node lastStmt = statements.getLastChild();
if (lastStmt == null || lastStmt.getType() != Token.RETURN) {
statements.addChildToBack(new Node(Token.RETURN));
}
Node result = Node.newString(Token.FUNCTION, fnNode.getName());
result.putIntProp(Node.FUNCTION_PROP, functionIndex);
return result;
}
/**
* Create loop node. The code generator will later call
* createWhile|createDoWhile|createFor|createForIn
* to finish loop generation.
*/
private Scope createLoopNode(Node loopLabel, int lineno) {
Scope result = createScopeNode(Token.LOOP, lineno);
if (loopLabel != null) {
((Jump)loopLabel).setLoop(result);
}
return result;
}
private Node createFor(Scope loop, Node init,
Node test, Node incr, Node body) {
if (init.getType() == Token.LET) {
// rewrite "for (let i=s; i < N; i++)..." as
// "let (i=s) { for (; i < N; i++)..." so that "s" is evaluated
// outside the scope of the for.
Scope let = Scope.splitScope(loop);
let.setType(Token.LET);
let.addChildrenToBack(init);
let.addChildToBack(createLoop(loop, LOOP_FOR, body, test,
new Node(Token.EMPTY), incr));
return let;
}
return createLoop(loop, LOOP_FOR, body, test, init, incr);
}
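// Assembles the generic loop IR: body and condition targets, an IFEQ jump
// back to the body, a break target, and (for for-loops) an increment target
// that becomes the continue target.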
private Node createLoop(Jump loop, int loopType, Node body,
Node cond, Node init, Node incr)
{
Node bodyTarget = Node.newTarget();
Node condTarget = Node.newTarget();
if (loopType == LOOP_FOR && cond.getType() == Token.EMPTY) {
cond = new Node(Token.TRUE);
}
Jump IFEQ = new Jump(Token.IFEQ, cond);
IFEQ.target = bodyTarget;
Node breakTarget = Node.newTarget();
loop.addChildToBack(bodyTarget);
loop.addChildrenToBack(body);
if (loopType == LOOP_WHILE || loopType == LOOP_FOR) {
// propagate lineno to condition
loop.addChildrenToBack(new Node(Token.EMPTY, loop.getLineno()));
}
loop.addChildToBack(condTarget);
loop.addChildToBack(IFEQ);
loop.addChildToBack(breakTarget);
loop.target = breakTarget;
Node continueTarget = condTarget;
if (loopType == LOOP_WHILE || loopType == LOOP_FOR) {
// Just add a GOTO to the condition in the do..while
loop.addChildToFront(makeJump(Token.GOTO, condTarget));
if (loopType == LOOP_FOR) {
int initType = init.getType();
if (initType != Token.EMPTY) {
if (initType != Token.VAR && initType != Token.LET) {
init = new Node(Token.EXPR_VOID, init);
}
loop.addChildToFront(init);
}
Node incrTarget = Node.newTarget();
loop.addChildAfter(incrTarget, body);
if (incr.getType() != Token.EMPTY) {
incr = new Node(Token.EXPR_VOID, incr);
loop.addChildAfter(incr, incrTarget);
}
continueTarget = incrTarget;
}
}
loop.setContinue(continueTarget);
return loop;
}
/**
* Generate IR for a for..in loop.
*/
private Node createForIn(int declType, Node loop, Node lhs,
Node obj, Node body, boolean isForEach)
{
int destructuring = -1;
int destructuringLen = 0;
Node lvalue;
int type = lhs.getType();
if (type == Token.VAR || type == Token.LET) {
Node kid = lhs.getLastChild();
int kidType = kid.getType();
if (kidType == Token.ARRAYLIT || kidType == Token.OBJECTLIT)
{
type = destructuring = kidType;
lvalue = kid;
destructuringLen = 0;
if (kid instanceof ArrayLiteral)
destructuringLen = ((ArrayLiteral) kid).getDestructuringLength();
} else if (kidType == Token.NAME) {
lvalue = Node.newString(Token.NAME, kid.getString());
} else {
reportError("msg.bad.for.in.lhs");
return null;
}
} else if (type == Token.ARRAYLIT || type == Token.OBJECTLIT) {
destructuring = type;
lvalue = lhs;
destructuringLen = 0;
if (lhs instanceof ArrayLiteral)
destructuringLen = ((ArrayLiteral) lhs).getDestructuringLength();
} else {
lvalue = makeReference(lhs);
if (lvalue == null) {
reportError("msg.bad.for.in.lhs");
return null;
}
}
Node localBlock = new Node(Token.LOCAL_BLOCK);
int initType = isForEach ? Token.ENUM_INIT_VALUES
: (destructuring != -1
? Token.ENUM_INIT_ARRAY
: Token.ENUM_INIT_KEYS);
Node init = new Node(initType, obj);
init.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
Node cond = new Node(Token.ENUM_NEXT);
cond.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
Node id = new Node(Token.ENUM_ID);
id.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
Node newBody = new Node(Token.BLOCK);
Node assign;
if (destructuring != -1) {
assign = createDestructuringAssignment(declType, lvalue, id);
if (!isForEach && (destructuring == Token.OBJECTLIT ||
destructuringLen != 2))
{
// destructuring assignment is only allowed in for..each or
// with an array type of length 2 (to hold key and value)
reportError("msg.bad.for.in.destruct");
}
} else {
assign = simpleAssignment(lvalue, id);
}
newBody.addChildToBack(new Node(Token.EXPR_VOID, assign));
newBody.addChildToBack(body);
loop = createLoop((Jump)loop, LOOP_WHILE, newBody, cond, null, null);
loop.addChildToFront(init);
if (type == Token.VAR || type == Token.LET)
loop.addChildToFront(lhs);
localBlock.addChildToBack(loop);
return localBlock;
}
/**
* Try/Catch/Finally
*
* The IRFactory tries to express as much as possible in the tree;
* the responsibilities remaining for Codegen are to add the Java
* handlers: (Either (but not both) of TARGET and FINALLY might not
* be defined)
*
* - a catch handler for javascript exceptions that unwraps the
* exception onto the stack and GOTOes to the catch target
*
* - a finally handler
*
* ... and a goto to GOTO around these handlers.
*/
private Node createTryCatchFinally(Node tryBlock, Node catchBlocks,
Node finallyBlock, int lineno)
{
boolean hasFinally = (finallyBlock != null)
&& (finallyBlock.getType() != Token.BLOCK
|| finallyBlock.hasChildren());
// short circuit
if (tryBlock.getType() == Token.BLOCK && !tryBlock.hasChildren()
&& !hasFinally)
{
return tryBlock;
}
boolean hasCatch = catchBlocks.hasChildren();
// short circuit
if (!hasFinally && !hasCatch) {
// because finally might be an empty block...
return tryBlock;
}
Node handlerBlock = new Node(Token.LOCAL_BLOCK);
Jump pn = new Jump(Token.TRY, tryBlock, lineno);
pn.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
if (hasCatch) {
// jump around catch code
Node endCatch = Node.newTarget();
pn.addChildToBack(makeJump(Token.GOTO, endCatch));
// make a TARGET for the catch that the tcf node knows about
Node catchTarget = Node.newTarget();
pn.target = catchTarget;
// mark it
pn.addChildToBack(catchTarget);
//
// Given
//
// try {
// tryBlock;
// } catch (e if condition1) {
// something1;
// ...
//
// } catch (e if conditionN) {
// somethingN;
// } catch (e) {
// somethingDefault;
// }
//
// rewrite as
//
// try {
// tryBlock;
// goto after_catch:
// } catch (x) {
// with (newCatchScope(e, x)) {
// if (condition1) {
// something1;
// goto after_catch;
// }
// }
// ...
// with (newCatchScope(e, x)) {
// if (conditionN) {
// somethingN;
// goto after_catch;
// }
// }
// with (newCatchScope(e, x)) {
// somethingDefault;
// goto after_catch;
// }
// }
// after_catch:
//
// If there is no default catch, then the last with block
// around "somethingDefault;" is replaced by "rethrow;"
// It is assumed that catch handler generation will store
// exception object in handlerBlock register
// Block with local for exception scope objects
Node catchScopeBlock = new Node(Token.LOCAL_BLOCK);
// expects each catch block's children to be (name, cond, statement) triples.
Node cb = catchBlocks.getFirstChild();
boolean hasDefault = false;
int scopeIndex = 0;
while (cb != null) {
int catchLineNo = cb.getLineno();
Node name = cb.getFirstChild();
Node cond = name.getNext();
Node catchStatement = cond.getNext();
cb.removeChild(name);
cb.removeChild(cond);
cb.removeChild(catchStatement);
// Add goto to the catch statement to jump out of catch
// but prefix it with LEAVEWITH since try..catch produces
// "with"code in order to limit the scope of the exception
// object.
catchStatement.addChildToBack(new Node(Token.LEAVEWITH));
catchStatement.addChildToBack(makeJump(Token.GOTO, endCatch));
// Create condition "if" when present
Node condStmt;
if (cond.getType() == Token.EMPTY) {
condStmt = catchStatement;
hasDefault = true;
} else {
condStmt = createIf(cond, catchStatement, null,
catchLineNo);
}
// Generate code to create the scope object and store
// it in catchScopeBlock register
Node catchScope = new Node(Token.CATCH_SCOPE, name,
createUseLocal(handlerBlock));
catchScope.putProp(Node.LOCAL_BLOCK_PROP, catchScopeBlock);
catchScope.putIntProp(Node.CATCH_SCOPE_PROP, scopeIndex);
catchScopeBlock.addChildToBack(catchScope);
// Add with statement based on catch scope object
catchScopeBlock.addChildToBack(
createWith(createUseLocal(catchScopeBlock), condStmt,
catchLineNo));
// move to next cb
cb = cb.getNext();
++scopeIndex;
}
pn.addChildToBack(catchScopeBlock);
if (!hasDefault) {
// Generate code to rethrow if no catch clause was executed
Node rethrow = new Node(Token.RETHROW);
rethrow.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
pn.addChildToBack(rethrow);
}
pn.addChildToBack(endCatch);
}
if (hasFinally) {
Node finallyTarget = Node.newTarget();
pn.setFinally(finallyTarget);
// add jsr finally to the try block
pn.addChildToBack(makeJump(Token.JSR, finallyTarget));
// jump around finally code
Node finallyEnd = Node.newTarget();
pn.addChildToBack(makeJump(Token.GOTO, finallyEnd));
pn.addChildToBack(finallyTarget);
Node fBlock = new Node(Token.FINALLY, finallyBlock);
fBlock.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
pn.addChildToBack(fBlock);
pn.addChildToBack(finallyEnd);
}
handlerBlock.addChildToBack(pn);
return handlerBlock;
}
private Node createWith(Node obj, Node body, int lineno) {
setRequiresActivation();
Node result = new Node(Token.BLOCK, lineno);
result.addChildToBack(new Node(Token.ENTERWITH, obj));
Node bodyNode = new Node(Token.WITH, body, lineno);
result.addChildrenToBack(bodyNode);
result.addChildToBack(new Node(Token.LEAVEWITH));
return result;
}
private Node createIf(Node cond, Node ifTrue, Node ifFalse, int lineno)
{
int condStatus = isAlwaysDefinedBoolean(cond);
if (condStatus == ALWAYS_TRUE_BOOLEAN) {
return ifTrue;
} else if (condStatus == ALWAYS_FALSE_BOOLEAN) {
if (ifFalse != null) {
return ifFalse;
}
// Replace if (false) xxx by empty block
return new Node(Token.BLOCK, lineno);
}
Node result = new Node(Token.BLOCK, lineno);
Node ifNotTarget = Node.newTarget();
Jump IFNE = new Jump(Token.IFNE, cond);
IFNE.target = ifNotTarget;
result.addChildToBack(IFNE);
result.addChildrenToBack(ifTrue);
if (ifFalse != null) {
Node endTarget = Node.newTarget();
result.addChildToBack(makeJump(Token.GOTO, endTarget));
result.addChildToBack(ifNotTarget);
result.addChildrenToBack(ifFalse);
result.addChildToBack(endTarget);
} else {
result.addChildToBack(ifNotTarget);
}
return result;
}
private Node createCondExpr(Node cond, Node ifTrue, Node ifFalse) {
int condStatus = isAlwaysDefinedBoolean(cond);
if (condStatus == ALWAYS_TRUE_BOOLEAN) {
return ifTrue;
} else if (condStatus == ALWAYS_FALSE_BOOLEAN) {
return ifFalse;
}
return new Node(Token.HOOK, cond, ifTrue, ifFalse);
}
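// Builds a unary node, folding constants where possible (typeof of a name,
// ~ and - on number literals, ! of a known boolean) and expanding delete
// into its BINDNAME/GETPROP/GET_REF forms.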
private Node createUnary(int nodeType, Node child)
{
int childType = child.getType();
switch (nodeType) {
case Token.DELPROP: {
Node n;
if (childType == Token.NAME) {
// Transform Delete(Name "a")
// to Delete(Bind("a"), String("a"))
child.setType(Token.BINDNAME);
Node left = child;
Node right = Node.newString(child.getString());
n = new Node(nodeType, left, right);
} else if (childType == Token.GETPROP ||
childType == Token.GETELEM)
{
Node left = child.getFirstChild();
Node right = child.getLastChild();
child.removeChild(left);
child.removeChild(right);
n = new Node(nodeType, left, right);
} else if (childType == Token.GET_REF) {
Node ref = child.getFirstChild();
child.removeChild(ref);
n = new Node(Token.DEL_REF, ref);
} else if (childType == Token.CALL) {
n = new Node(nodeType, new Node(Token.TRUE), child);
} else {
n = new Node(Token.TRUE);
}
return n;
}
case Token.TYPEOF:
if (childType == Token.NAME) {
child.setType(Token.TYPEOFNAME);
return child;
}
break;
case Token.BITNOT:
if (childType == Token.NUMBER) {
int value = ScriptRuntime.toInt32(child.getDouble());
child.setDouble(~value);
return child;
}
break;
case Token.NEG:
if (childType == Token.NUMBER) {
child.setDouble(-child.getDouble());
return child;
}
break;
case Token.NOT: {
int status = isAlwaysDefinedBoolean(child);
if (status != 0) {
int type;
if (status == ALWAYS_TRUE_BOOLEAN) {
type = Token.FALSE;
} else {
type = Token.TRUE;
}
if (childType == Token.TRUE || childType == Token.FALSE) {
child.setType(type);
return child;
}
return new Node(type);
}
break;
}
}
return new Node(nodeType, child);
}
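// Wraps the target in a CALL or NEW node; direct calls to eval (and With)
// are flagged as special calls and force an activation object.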
private Node createCallOrNew(int nodeType, Node child) {
int type = Node.NON_SPECIALCALL;
if (child.getType() == Token.NAME) {
String name = child.getString();
if (name.equals("eval")) {
type = Node.SPECIALCALL_EVAL;
} else if (name.equals("With")) {
type = Node.SPECIALCALL_WITH;
}
} else if (child.getType() == Token.GETPROP) {
String name = child.getLastChild().getString();
if (name.equals("eval")) {
type = Node.SPECIALCALL_EVAL;
}
}
Node node = new Node(nodeType, child);
if (type != Node.NON_SPECIALCALL) {
// Calls to these functions require activation objects.
setRequiresActivation();
node.putIntProp(Node.SPECIALCALL_PROP, type);
}
return node;
}
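// Builds an INC/DEC node over a reference, recording the prefix/postfix and
// increment/decrement variant in INCRDECR_PROP.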
private Node createIncDec(int nodeType, boolean post, Node child)
{
child = makeReference(child);
int childType = child.getType();
switch (childType) {
case Token.NAME:
case Token.GETPROP:
case Token.GETELEM:
case Token.GET_REF: {
Node n = new Node(nodeType, child);
int incrDecrMask = 0;
if (nodeType == Token.DEC) {
incrDecrMask |= Node.DECR_FLAG;
}
if (post) {
incrDecrMask |= Node.POST_FLAG;
}
n.putIntProp(Node.INCRDECR_PROP, incrDecrMask);
return n;
}
}
throw Kit.codeBug();
}
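// Property access: plain NAME/GETPROP in the common case, GET_REF of
// REF_SPECIAL for special properties, and a member-ref get when a namespace
// or E4X member flags are present.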
private Node createPropertyGet(Node target, String namespace, String name,
int memberTypeFlags)
{
if (namespace == null && memberTypeFlags == 0) {
if (target == null) {
return createName(name);
}
checkActivationName(name, Token.GETPROP);
if (ScriptRuntime.isSpecialProperty(name)) {
Node ref = new Node(Token.REF_SPECIAL, target);
ref.putProp(Node.NAME_PROP, name);
return new Node(Token.GET_REF, ref);
}
return new Node(Token.GETPROP, target, Node.newString(name));
}
Node elem = Node.newString(name);
memberTypeFlags |= Node.PROPERTY_FLAG;
return createMemberRefGet(target, namespace, elem, memberTypeFlags);
}
/**
* @param target the node before the LB
* @param namespace optional namespace
* @param elem the node in the brackets
* @param memberTypeFlags E4X flags
*/
private Node createElementGet(Node target, String namespace, Node elem,
int memberTypeFlags)
{
// OPT: could optimize to createPropertyGet
// iff elem is a string that cannot be a number
if (namespace == null && memberTypeFlags == 0) {
// stand-alone [aaa] as primary expression is array literal
// declaration and should not come here!
if (target == null) throw Kit.codeBug();
return new Node(Token.GETELEM, target, elem);
}
return createMemberRefGet(target, namespace, elem, memberTypeFlags);
}
private Node createMemberRefGet(Node target, String namespace, Node elem,
int memberTypeFlags)
{
Node nsNode = null;
if (namespace != null) {
// See 11.1.2 in ECMA 357
if (namespace.equals("*")) {
nsNode = new Node(Token.NULL);
} else {
nsNode = createName(namespace);
}
}
Node ref;
if (target == null) {
if (namespace == null) {
ref = new Node(Token.REF_NAME, elem);
} else {
ref = new Node(Token.REF_NS_NAME, nsNode, elem);
}
} else {
if (namespace == null) {
ref = new Node(Token.REF_MEMBER, target, elem);
} else {
ref = new Node(Token.REF_NS_MEMBER, target, nsNode, elem);
}
}
if (memberTypeFlags != 0) {
ref.putIntProp(Node.MEMBER_TYPE_PROP, memberTypeFlags);
}
return new Node(Token.GET_REF, ref);
}
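// Builds a binary node, constant-folding ADD/SUB/MUL/DIV over string and
// number literals and short-circuiting && and || when the left operand is a
// known boolean.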
private Node createBinary(int nodeType, Node left, Node right) {
switch (nodeType) {
case Token.ADD:
// numerical addition and string concatenation
if (left.type == Token.STRING) {
String s2;
if (right.type == Token.STRING) {
s2 = right.getString();
} else if (right.type == Token.NUMBER) {
s2 = ScriptRuntime.numberToString(right.getDouble(), 10);
} else {
break;
}
String s1 = left.getString();
left.setString(s1.concat(s2));
return left;
} else if (left.type == Token.NUMBER) {
if (right.type == Token.NUMBER) {
left.setDouble(left.getDouble() + right.getDouble());
return left;
} else if (right.type == Token.STRING) {
String s1, s2;
s1 = ScriptRuntime.numberToString(left.getDouble(), 10);
s2 = right.getString();
right.setString(s1.concat(s2));
return right;
}
}
// can't do anything if we don't know both types - since
// 0 + object is supposed to call toString on the object and do
// string concatenation rather than addition
break;
case Token.SUB:
// numerical subtraction
if (left.type == Token.NUMBER) {
double ld = left.getDouble();
if (right.type == Token.NUMBER) {
//both numbers
left.setDouble(ld - right.getDouble());
return left;
} else if (ld == 0.0) {
// first 0: 0-x -> -x
return new Node(Token.NEG, right);
}
} else if (right.type == Token.NUMBER) {
if (right.getDouble() == 0.0) {
//second 0: x - 0 -> +x
// cannot simply use x because x - 0 must be a number
return new Node(Token.POS, left);
}
}
break;
case Token.MUL:
// numerical multiplication
if (left.type == Token.NUMBER) {
double ld = left.getDouble();
if (right.type == Token.NUMBER) {
//both numbers
left.setDouble(ld * right.getDouble());
return left;
} else if (ld == 1.0) {
// first 1: 1 * x -> +x
return new Node(Token.POS, right);
}
} else if (right.type == Token.NUMBER) {
if (right.getDouble() == 1.0) {
//second 1: x * 1 -> +x
// cannot simply use x because x * 1 must be a number
return new Node(Token.POS, left);
}
}
// can't do x*0: Infinity * 0 gives NaN, not 0
break;
case Token.DIV:
// number division
if (right.type == Token.NUMBER) {
double rd = right.getDouble();
if (left.type == Token.NUMBER) {
// both constants -- just divide, trust Java to handle x/0
left.setDouble(left.getDouble() / rd);
return left;
} else if (rd == 1.0) {
// second 1: x/1 -> +x
// not simply x, to force number conversion
return new Node(Token.POS, left);
}
}
break;
case Token.AND: {
// Since x && y gives x, not false, when Boolean(x) is false,
// and y, not Boolean(y), when Boolean(x) is true, x && y
// can only be simplified if x is defined. See bug 309957.
int leftStatus = isAlwaysDefinedBoolean(left);
if (leftStatus == ALWAYS_FALSE_BOOLEAN) {
// if the first one is false, just return it
return left;
} else if (leftStatus == ALWAYS_TRUE_BOOLEAN) {
// if first is true, set to second
return right;
}
break;
}
case Token.OR: {
// Since x || y gives x, not true, when Boolean(x) is true,
// and y, not Boolean(y), when Boolean(x) is false, x || y
// can only be simplified if x is defined. See bug 309957.
int leftStatus = isAlwaysDefinedBoolean(left);
if (leftStatus == ALWAYS_TRUE_BOOLEAN) {
// if the first one is true, just return it
return left;
} else if (leftStatus == ALWAYS_FALSE_BOOLEAN) {
// if first is false, set to second
return right;
}
break;
}
}
return new Node(nodeType, left, right);
}
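// Lowers assignments: simple and destructuring assignments go to their
// helpers; compound operators are rewritten into SETNAME, SETPROP_OP,
// SETELEM_OP or SET_REF_OP forms with a USE_STACK left operand.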
private Node createAssignment(int assignType, Node left, Node right)
{
Node ref = makeReference(left);
if (ref == null) {
if (left.getType() == Token.ARRAYLIT ||
left.getType() == Token.OBJECTLIT)
{
if (assignType != Token.ASSIGN) {
reportError("msg.bad.destruct.op");
return right;
}
return createDestructuringAssignment(-1, left, right);
}
reportError("msg.bad.assign.left");
return right;
}
left = ref;
int assignOp;
switch (assignType) {
case Token.ASSIGN:
return simpleAssignment(left, right);
case Token.ASSIGN_BITOR: assignOp = Token.BITOR; break;
case Token.ASSIGN_BITXOR: assignOp = Token.BITXOR; break;
case Token.ASSIGN_BITAND: assignOp = Token.BITAND; break;
case Token.ASSIGN_LSH: assignOp = Token.LSH; break;
case Token.ASSIGN_RSH: assignOp = Token.RSH; break;
case Token.ASSIGN_URSH: assignOp = Token.URSH; break;
case Token.ASSIGN_ADD: assignOp = Token.ADD; break;
case Token.ASSIGN_SUB: assignOp = Token.SUB; break;
case Token.ASSIGN_MUL: assignOp = Token.MUL; break;
case Token.ASSIGN_DIV: assignOp = Token.DIV; break;
case Token.ASSIGN_MOD: assignOp = Token.MOD; break;
default: throw Kit.codeBug();
}
int nodeType = left.getType();
switch (nodeType) {
case Token.NAME: {
Node op = new Node(assignOp, left, right);
Node lvalueLeft = Node.newString(Token.BINDNAME, left.getString());
return new Node(Token.SETNAME, lvalueLeft, op);
}
case Token.GETPROP:
case Token.GETELEM: {
Node obj = left.getFirstChild();
Node id = left.getLastChild();
int type = nodeType == Token.GETPROP
? Token.SETPROP_OP
: Token.SETELEM_OP;
Node opLeft = new Node(Token.USE_STACK);
Node op = new Node(assignOp, opLeft, right);
return new Node(type, obj, id, op);
}
case Token.GET_REF: {
ref = left.getFirstChild();
checkMutableReference(ref);
Node opLeft = new Node(Token.USE_STACK);
Node op = new Node(assignOp, opLeft, right);
return new Node(Token.SET_REF_OP, ref, op);
}
}
throw Kit.codeBug();
}
private Node createUseLocal(Node localBlock) {
if (Token.LOCAL_BLOCK != localBlock.getType()) throw Kit.codeBug();
Node result = new Node(Token.LOCAL_LOAD);
result.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
return result;
}
private Jump makeJump(int type, Node target) {
Jump n = new Jump(type);
n.target = target;
return n;
}
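// Returns a node usable as an assignment target, converting CALL into
// GET_REF(REF_CALL); returns null so the caller can report an invalid
// left-hand side.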
private Node makeReference(Node node) {
int type = node.getType();
switch (type) {
case Token.NAME:
case Token.GETPROP:
case Token.GETELEM:
case Token.GET_REF:
return node;
case Token.CALL:
node.setType(Token.REF_CALL);
return new Node(Token.GET_REF, node);
}
// Signal caller to report error
return null;
}
// Check if Node always mean true or false in boolean context
private static int isAlwaysDefinedBoolean(Node node) {
switch (node.getType()) {
case Token.FALSE:
case Token.NULL:
return ALWAYS_FALSE_BOOLEAN;
case Token.TRUE:
return ALWAYS_TRUE_BOOLEAN;
case Token.NUMBER: {
double num = node.getDouble();
if (num == num && num != 0.0) {
return ALWAYS_TRUE_BOOLEAN;
} else {
return ALWAYS_FALSE_BOOLEAN;
}
}
}
return 0;
}
// Check if node is the target of a destructuring bind.
boolean isDestructuring(Node n) {
return n instanceof DestructuringForm
&& ((DestructuringForm)n).isDestructuring();
}
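// Emits decompiler tokens for a function header (name or member expression,
// parameter list, opening brace) and returns the transformed member
// expression, if any.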
Node decompileFunctionHeader(FunctionNode fn) {
Node mexpr = null;
if (fn.getFunctionName() != null) {
decompiler.addName(fn.getName());
} else if (fn.getMemberExprNode() != null) {
mexpr = transform(fn.getMemberExprNode());
}
decompiler.addToken(Token.LP);
List<AstNode> params = fn.getParams();
for (int i = 0; i < params.size(); i++) {
decompile(params.get(i));
if (i < params.size() - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RP);
if (!fn.isExpressionClosure()) {
decompiler.addEOL(Token.LC);
}
return mexpr;
}
void decompile(AstNode node) {
switch (node.getType()) {
case Token.ARRAYLIT:
decompileArrayLiteral((ArrayLiteral)node);
break;
case Token.OBJECTLIT:
decompileObjectLiteral((ObjectLiteral)node);
break;
case Token.STRING:
decompiler.addString(((StringLiteral)node).getValue());
break;
case Token.NAME:
decompiler.addName(((Name)node).getIdentifier());
break;
case Token.NUMBER:
decompiler.addNumber(((NumberLiteral)node).getNumber());
break;
case Token.GETPROP:
decompilePropertyGet((PropertyGet)node);
break;
case Token.EMPTY:
break;
case Token.GETELEM:
decompileElementGet((ElementGet) node);
break;
default:
Kit.codeBug("unexpected token: "
+ Token.typeToName(node.getType()));
}
}
// used for destructuring forms, since we don't transform() them
void decompileArrayLiteral(ArrayLiteral node) {
decompiler.addToken(Token.LB);
List<AstNode> elems = node.getElements();
int size = elems.size();
for (int i = 0; i < size; i++) {
AstNode elem = elems.get(i);
decompile(elem);
if (i < size - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RB);
}
// only used for destructuring forms
void decompileObjectLiteral(ObjectLiteral node) {
decompiler.addToken(Token.LC);
List<ObjectProperty> props = node.getElements();
int size = props.size();
for (int i = 0; i < size; i++) {
ObjectProperty prop = props.get(i);
boolean destructuringShorthand =
Boolean.TRUE.equals(prop.getProp(Node.DESTRUCTURING_SHORTHAND));
decompile(prop.getLeft());
if (!destructuringShorthand) {
decompiler.addToken(Token.COLON);
decompile(prop.getRight());
}
if (i < size - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RC);
}
// only used for destructuring forms
void decompilePropertyGet(PropertyGet node) {
decompile(node.getTarget());
decompiler.addToken(Token.DOT);
decompile(node.getProperty());
}
// only used for destructuring forms
void decompileElementGet(ElementGet node) {
decompile(node.getTarget());
decompiler.addToken(Token.LB);
decompile(node.getElement());
decompiler.addToken(Token.RB);
}
}
|
src/org/mozilla/javascript/IRFactory.java
|
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Norris Boyd
* Igor Bukanov
* Ethan Hugg
* Bob Jervis
* Terry Lucas
* Milen Nankov
* Steve Yegge
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package org.mozilla.javascript;
import org.mozilla.javascript.ast.*;
import java.util.List;
import java.util.ArrayList;
/**
* This class rewrites the parse tree into an IR suitable for codegen.
*
* @see Node
* @author Mike McCabe
* @author Norris Boyd
*/
public final class IRFactory extends Parser
{
private static final int LOOP_DO_WHILE = 0;
private static final int LOOP_WHILE = 1;
private static final int LOOP_FOR = 2;
private static final int ALWAYS_TRUE_BOOLEAN = 1;
private static final int ALWAYS_FALSE_BOOLEAN = -1;
private Decompiler decompiler = new Decompiler();
public IRFactory() {
super();
}
public IRFactory(CompilerEnvirons env) {
this(env, env.getErrorReporter());
}
public IRFactory(CompilerEnvirons env, ErrorReporter errorReporter) {
super(env, errorReporter);
}
/**
* Transforms the tree into a lower-level IR suitable for codegen.
* Optionally generates the encoded source.
*/
public ScriptNode transformTree(AstRoot root) {
currentScriptOrFn = root;
int sourceStartOffset = decompiler.getCurrentOffset();
if (Token.printTrees) {
System.out.println("IRFactory.transformTree");
System.out.println(root.debugPrint());
}
ScriptNode script = (ScriptNode)transform(root);
int sourceEndOffset = decompiler.getCurrentOffset();
script.setEncodedSourceBounds(sourceStartOffset,
sourceEndOffset);
if (compilerEnv.isGeneratingSource()) {
script.setEncodedSource(decompiler.getEncodedSource());
}
decompiler = null;
return script;
}
// Might want to convert this to polymorphism - move transform*
// functions into the AstNode subclasses. OTOH that would make
// IR transformation part of the public AST API - desirable?
// Another possibility: create AstTransformer interface and adapter.
public Node transform(AstNode node) {
switch (node.getType()) {
case Token.ARRAYCOMP:
return transformArrayComp((ArrayComprehension)node);
case Token.ARRAYLIT:
return transformArrayLiteral((ArrayLiteral)node);
case Token.BLOCK:
return transformBlock(node);
case Token.BREAK:
return transformBreak((BreakStatement)node);
case Token.CALL:
return transformFunctionCall((FunctionCall)node);
case Token.CONTINUE:
return transformContinue((ContinueStatement)node);
case Token.DO:
return transformDoLoop((DoLoop)node);
case Token.EMPTY:
return node;
case Token.FOR:
if (node instanceof ForInLoop) {
return transformForInLoop((ForInLoop)node);
} else {
return transformForLoop((ForLoop)node);
}
case Token.FUNCTION:
return transformFunction((FunctionNode)node);
case Token.GETELEM:
return transformElementGet((ElementGet)node);
case Token.GETPROP:
return transformPropertyGet((PropertyGet)node);
case Token.HOOK:
return transformCondExpr((ConditionalExpression)node);
case Token.IF:
return transformIf((IfStatement)node);
case Token.TRUE:
case Token.FALSE:
case Token.THIS:
case Token.NULL:
case Token.DEBUGGER:
return transformLiteral(node);
case Token.NAME:
return transformName((Name)node);
case Token.NUMBER:
return transformNumber((NumberLiteral)node);
case Token.NEW:
return transformNewExpr((NewExpression)node);
case Token.OBJECTLIT:
return transformObjectLiteral((ObjectLiteral)node);
case Token.REGEXP:
return transformRegExp((RegExpLiteral)node);
case Token.RETURN:
return transformReturn((ReturnStatement)node);
case Token.SCRIPT:
return transformScript((ScriptNode)node);
case Token.STRING:
return transformString((StringLiteral)node);
case Token.SWITCH:
return transformSwitch((SwitchStatement)node);
case Token.THROW:
return transformThrow((ThrowStatement)node);
case Token.TRY:
return transformTry((TryStatement)node);
case Token.WHILE:
return transformWhileLoop((WhileLoop)node);
case Token.WITH:
return transformWith((WithStatement)node);
case Token.YIELD:
return transformYield((Yield)node);
default:
if (node instanceof ExpressionStatement) {
return transformExprStmt((ExpressionStatement)node);
}
if (node instanceof Assignment) {
return transformAssignment((Assignment)node);
}
if (node instanceof UnaryExpression) {
return transformUnary((UnaryExpression)node);
}
if (node instanceof XmlMemberGet) {
return transformXmlMemberGet((XmlMemberGet)node);
}
if (node instanceof InfixExpression) {
return transformInfix((InfixExpression)node);
}
if (node instanceof VariableDeclaration) {
return transformVariables((VariableDeclaration)node);
}
if (node instanceof ParenthesizedExpression) {
return transformParenExpr((ParenthesizedExpression)node);
}
if (node instanceof LabeledStatement) {
return transformLabeledStatement((LabeledStatement)node);
}
if (node instanceof LetNode) {
return transformLetNode((LetNode)node);
}
if (node instanceof XmlRef) {
return transformXmlRef((XmlRef)node);
}
if (node instanceof XmlLiteral) {
return transformXmlLiteral((XmlLiteral)node);
}
throw new IllegalArgumentException("Can't transform: " + node);
}
}
private Node transformArrayComp(ArrayComprehension node) {
// An array comprehension expression such as
//
// [expr for (x in foo) for each ([y, z] in bar) if (cond)]
//
// is rewritten approximately as
//
// new Scope(ARRAYCOMP) {
// new Node(BLOCK) {
// let tmp1 = new Array;
// for (let x in foo) {
// for each (let tmp2 in bar) {
// if (cond) {
// tmp1.push([y, z] = tmp2, expr);
// }
// }
// }
// }
// createName(tmp1)
// }
int lineno = node.getLineno();
Scope scopeNode = createScopeNode(Token.ARRAYCOMP, lineno);
String arrayName = currentScriptOrFn.getNextTempName();
pushScope(scopeNode);
try {
defineSymbol(Token.LET, arrayName, false);
Node block = new Node(Token.BLOCK, lineno);
Node newArray = createCallOrNew(Token.NEW, createName("Array"));
Node init = new Node(Token.EXPR_VOID,
createAssignment(Token.ASSIGN,
createName(arrayName),
newArray),
lineno);
block.addChildToBack(init);
block.addChildToBack(arrayCompTransformHelper(node, arrayName));
scopeNode.addChildToBack(block);
scopeNode.addChildToBack(createName(arrayName));
return scopeNode;
} finally {
popScope();
}
}
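// Builds the comprehension body: a tmpArray.push(expr) call, wrapped in the
// optional filter and then in the comprehension's loops from innermost
// outward.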
private Node arrayCompTransformHelper(ArrayComprehension node,
String arrayName) {
decompiler.addToken(Token.LB);
int lineno = node.getLineno();
Node expr = transform(node.getResult());
List<ArrayComprehensionLoop> loops = node.getLoops();
int numLoops = loops.size();
// Walk through loops, collecting and defining their iterator symbols.
Node[] iterators = new Node[numLoops];
Node[] iteratedObjs = new Node[numLoops];
for (int i = 0; i < numLoops; i++) {
ArrayComprehensionLoop acl = loops.get(i);
decompiler.addName(" ");
decompiler.addToken(Token.FOR);
if (acl.isForEach()) {
decompiler.addName("each ");
}
decompiler.addToken(Token.LP);
AstNode iter = acl.getIterator();
String name = null;
if (iter.getType() == Token.NAME) {
name = iter.getString();
decompiler.addName(name);
} else {
// destructuring assignment
decompile(iter);
name = currentScriptOrFn.getNextTempName();
defineSymbol(Token.LP, name, false);
expr = createBinary(Token.COMMA,
createAssignment(Token.ASSIGN,
iter,
createName(name)),
expr);
}
Node init = createName(name);
// Define as a let since we want the scope of the variable to
// be restricted to the array comprehension
defineSymbol(Token.LET, name, false);
iterators[i] = init;
decompiler.addToken(Token.IN);
iteratedObjs[i] = transform(acl.getIteratedObject());
decompiler.addToken(Token.RP);
}
// generate code for tmpArray.push(body)
Node call = createCallOrNew(Token.CALL,
createPropertyGet(createName(arrayName),
null,
"push", 0));
Node body = new Node(Token.EXPR_VOID, call, lineno);
if (node.getFilter() != null) {
decompiler.addName(" ");
decompiler.addToken(Token.IF);
decompiler.addToken(Token.LP);
body = createIf(transform(node.getFilter()), body, null, lineno);
decompiler.addToken(Token.RP);
}
// Now walk loops in reverse to build up the body statement.
int pushed = 0;
try {
for (int i = numLoops-1; i >= 0; i--) {
ArrayComprehensionLoop acl = loops.get(i);
Scope loop = createLoopNode(null, // no label
acl.getLineno());
pushScope(loop);
pushed++;
body = createForIn(Token.LET,
loop,
iterators[i],
iteratedObjs[i],
body,
acl.isForEach());
}
} finally {
for (int i = 0; i < pushed; i++) {
popScope();
}
}
decompiler.addToken(Token.RB);
// Now that we've accumulated any destructuring forms,
// add expr to the call node; it's pushed on each iteration.
call.addChildToBack(expr);
return body;
}
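// Array literals become ARRAYLIT nodes; elided elements are recorded in
// SKIP_INDEXES_PROP, and destructuring patterns are returned untransformed.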
private Node transformArrayLiteral(ArrayLiteral node) {
if (node.isDestructuring()) {
return node;
}
decompiler.addToken(Token.LB);
List<AstNode> elems = node.getElements();
Node array = new Node(Token.ARRAYLIT);
List<Integer> skipIndexes = null;
for (int i = 0; i < elems.size(); ++i) {
AstNode elem = elems.get(i);
if (elem.getType() != Token.EMPTY) {
array.addChildToBack(transform(elem));
} else {
if (skipIndexes == null) {
skipIndexes = new ArrayList<Integer>();
}
skipIndexes.add(i);
}
if (i < elems.size() - 1)
decompiler.addToken(Token.COMMA);
}
decompiler.addToken(Token.RB);
array.putIntProp(Node.DESTRUCTURING_ARRAY_LENGTH,
node.getDestructuringLength());
if (skipIndexes != null) {
int[] skips = new int[skipIndexes.size()];
for (int i = 0; i < skipIndexes.size(); i++)
skips[i] = skipIndexes.get(i);
array.putProp(Node.SKIP_INDEXES_PROP, skips);
}
return array;
}
private Node transformAssignment(Assignment node) {
AstNode left = removeParens(node.getLeft());
Node target = null;
if (isDestructuring(left)) {
decompile(left);
target = left;
} else {
target = transform(left);
}
decompiler.addToken(node.getType());
return createAssignment(node.getType(),
target,
transform(node.getRight()));
}
private Node transformBlock(AstNode node) {
if (node instanceof Scope) {
pushScope((Scope)node);
}
try {
List<Node> kids = new ArrayList<Node>();
for (Node kid : node) {
kids.add(transform((AstNode)kid));
}
node.removeChildren();
for (Node kid : kids) {
node.addChildToBack(kid);
}
return node;
} finally {
if (node instanceof Scope) {
popScope();
}
}
}
private Node transformBreak(BreakStatement node) {
decompiler.addToken(Token.BREAK);
if (node.getBreakLabel() != null) {
decompiler.addName(node.getBreakLabel().getIdentifier());
}
decompiler.addEOL(Token.SEMI);
return node;
}
private Node transformCondExpr(ConditionalExpression node) {
Node test = transform(node.getTestExpression());
decompiler.addToken(Token.HOOK);
Node ifTrue = transform(node.getTrueExpression());
decompiler.addToken(Token.COLON);
Node ifFalse = transform(node.getFalseExpression());
return createCondExpr(test, ifTrue, ifFalse);
}
private Node transformContinue(ContinueStatement node) {
decompiler.addToken(Token.CONTINUE);
if (node.getLabel() != null) {
decompiler.addName(node.getLabel().getIdentifier());
}
decompiler.addEOL(Token.SEMI);
return node;
}
private Node transformDoLoop(DoLoop loop) {
loop.setType(Token.LOOP);
pushScope(loop);
try {
decompiler.addToken(Token.DO);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addToken(Token.RC);
decompiler.addToken(Token.WHILE);
decompiler.addToken(Token.LP);
Node cond = transform(loop.getCondition());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.SEMI);
return createLoop(loop, LOOP_DO_WHILE,
body, cond, null, null);
} finally {
popScope();
}
}
private Node transformElementGet(ElementGet node) {
// OPT: could optimize to createPropertyGet
// iff elem is a string that cannot be a number
Node target = transform(node.getTarget());
decompiler.addToken(Token.LB);
Node element = transform(node.getElement());
decompiler.addToken(Token.RB);
return new Node(Token.GETELEM, target, element);
}
private Node transformExprStmt(ExpressionStatement node) {
Node expr = transform(node.getExpression());
decompiler.addEOL(Token.SEMI);
return new Node(node.getType(), expr, node.getLineno());
}
private Node transformForInLoop(ForInLoop loop) {
decompiler.addToken(Token.FOR);
if (loop.isForEach())
decompiler.addName("each ");
decompiler.addToken(Token.LP);
loop.setType(Token.LOOP);
pushScope(loop);
try {
int declType = -1;
AstNode iter = loop.getIterator();
if (iter instanceof VariableDeclaration) {
declType = ((VariableDeclaration)iter).getType();
}
Node lhs = transform(iter);
decompiler.addToken(Token.IN);
Node obj = transform(loop.getIteratedObject());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addEOL(Token.RC);
return createForIn(declType, loop, lhs, obj, body,
loop.isForEach());
} finally {
popScope();
}
}
private Node transformForLoop(ForLoop loop) {
decompiler.addToken(Token.FOR);
decompiler.addToken(Token.LP);
loop.setType(Token.LOOP);
// XXX: Can't use pushScope/popScope here since 'createFor' may split
// the scope
Scope savedScope = currentScope;
currentScope = loop;
try {
Node init = transform(loop.getInitializer());
decompiler.addToken(Token.SEMI);
Node test = transform(loop.getCondition());
decompiler.addToken(Token.SEMI);
Node incr = transform(loop.getIncrement());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addEOL(Token.RC);
return createFor(loop, init, test, incr, body);
} finally {
currentScope = savedScope;
}
}
private Node transformFunction(FunctionNode fn) {
int functionType = fn.getFunctionType();
int start = decompiler.markFunctionStart(functionType);
Node mexpr = decompileFunctionHeader(fn);
int index = currentScriptOrFn.addFunction(fn);
PerFunctionVariables savedVars = new PerFunctionVariables(fn);
try {
// If we start needing to record much more codegen metadata during
// function parsing, we should lump it all into a helper class.
Node destructuring = (Node)fn.getProp(Node.DESTRUCTURING_PARAMS);
fn.removeProp(Node.DESTRUCTURING_PARAMS);
int lineno = fn.getBody().getLineno();
++nestingOfFunction; // only for body, not params
Node body = transform(fn.getBody());
if (!fn.isExpressionClosure()) {
decompiler.addToken(Token.RC);
}
fn.setEncodedSourceBounds(start, decompiler.markFunctionEnd(start));
if (functionType != FunctionNode.FUNCTION_EXPRESSION && !fn.isExpressionClosure()) {
// Add EOL only if function is not part of expression
// since it gets SEMI + EOL from Statement in that case
decompiler.addToken(Token.EOL);
}
if (destructuring != null) {
body.addChildToFront(new Node(Token.EXPR_VOID,
destructuring, lineno));
}
int syntheticType = fn.getFunctionType();
Node pn = initFunction(fn, index, body, syntheticType);
if (mexpr != null) {
pn = createAssignment(Token.ASSIGN, mexpr, pn);
if (syntheticType != FunctionNode.FUNCTION_EXPRESSION) {
pn = createExprStatementNoReturn(pn, fn.getLineno());
}
}
return pn;
} finally {
--nestingOfFunction;
savedVars.restore();
}
}
private Node transformFunctionCall(FunctionCall node) {
Node call = createCallOrNew(Token.CALL, transform(node.getTarget()));
call.setLineno(node.getLineno());
decompiler.addToken(Token.LP);
List<AstNode> args = node.getArguments();
for (int i = 0; i < args.size(); i++) {
AstNode arg = args.get(i);
call.addChildToBack(transform(arg));
if (i < args.size() - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RP);
return call;
}
private Node transformIf(IfStatement n) {
decompiler.addToken(Token.IF);
decompiler.addToken(Token.LP);
Node cond = transform(n.getCondition());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node ifTrue = transform(n.getThenPart());
Node ifFalse = null;
if (n.getElsePart() != null) {
decompiler.addToken(Token.RC);
decompiler.addToken(Token.ELSE);
decompiler.addEOL(Token.LC);
ifFalse = transform(n.getElsePart());
}
decompiler.addEOL(Token.RC);
return createIf(cond, ifTrue, ifFalse, n.getLineno());
}
private Node transformInfix(InfixExpression node) {
Node left = transform(node.getLeft());
decompiler.addToken(node.getType());
Node right = transform(node.getRight());
if (node instanceof XmlDotQuery) {
decompiler.addToken(Token.RP);
}
return createBinary(node.getType(), left, right);
}
private Node transformLabeledStatement(LabeledStatement ls) {
for (Label lb : ls.getLabels()) {
decompiler.addName(lb.getName());
decompiler.addEOL(Token.COLON);
}
Label label = ls.getFirstLabel();
Node statement = transform(ls.getStatement());
// Make a target and put it _after_ the statement node. Add in the
// LABEL node, so breaks get the right target.
Node breakTarget = Node.newTarget();
Node block = new Node(Token.BLOCK, label, statement, breakTarget);
label.target = breakTarget;
return block;
}
private Node transformLetNode(LetNode node) {
pushScope(node);
try {
decompiler.addToken(Token.LET);
decompiler.addToken(Token.LP);
Node vars = transformVariableInitializers(node.getVariables());
decompiler.addToken(Token.RP);
node.addChildToBack(vars);
boolean letExpr = node.getType() == Token.LETEXPR;
if (node.getBody() != null) {
if (letExpr) {
decompiler.addName(" ");
} else {
decompiler.addEOL(Token.LC);
}
node.addChildToBack(transform(node.getBody()));
if (!letExpr) {
decompiler.addEOL(Token.RC);
}
}
return node;
} finally {
popScope();
}
}
private Node transformLiteral(AstNode node) {
decompiler.addToken(node.getType());
return node;
}
private Node transformName(Name node) {
decompiler.addName(node.getIdentifier());
return node;
}
private Node transformNewExpr(NewExpression node) {
decompiler.addToken(Token.NEW);
Node nx = createCallOrNew(Token.NEW, transform(node.getTarget()));
nx.setLineno(node.getLineno());
List<AstNode> args = node.getArguments();
if (!args.isEmpty())
decompiler.addToken(Token.LP);
for (int i = 0; i < args.size(); i++) {
AstNode arg = args.get(i);
nx.addChildToBack(transform(arg));
if (i < args.size() - 1) {
decompiler.addToken(Token.COMMA);
}
}
if (!args.isEmpty())
decompiler.addToken(Token.RP);
if (node.getInitializer() != null) {
nx.addChildToBack(transformObjectLiteral(node.getInitializer()));
}
return nx;
}
private Node transformNumber(NumberLiteral node) {
decompiler.addNumber(node.getNumber());
return node;
}
private Node transformObjectLiteral(ObjectLiteral node) {
if (node.isDestructuring()) {
return node;
}
// createObjectLiteral rewrites its argument as object
// creation plus object property entries, so later compiler
// stages don't need to know about object literals.
decompiler.addToken(Token.LC);
List<ObjectProperty> elems = node.getElements();
Node object = new Node(Token.OBJECTLIT);
Object[] properties;
if (elems.isEmpty()) {
properties = ScriptRuntime.emptyArgs;
} else {
int size = elems.size(), i = 0;
properties = new Object[size];
for (ObjectProperty prop : elems) {
if (prop.isGetter()) {
decompiler.addToken(Token.GET);
} else if (prop.isSetter()) {
decompiler.addToken(Token.SET);
}
properties[i++] = getPropKey(prop.getLeft());
// OBJECTLIT is used as ':' in object literal for
// decompilation to solve spacing ambiguity.
if (!(prop.isGetter() || prop.isSetter())) {
decompiler.addToken(Token.OBJECTLIT);
}
Node right = transform(prop.getRight());
if (prop.isGetter()) {
right = createUnary(Token.GET, right);
} else if (prop.isSetter()) {
right = createUnary(Token.SET, right);
}
object.addChildToBack(right);
if (i < size) {
decompiler.addToken(Token.COMMA);
}
}
}
decompiler.addToken(Token.RC);
object.putProp(Node.OBJECT_IDS_PROP, properties);
return object;
}
private Object getPropKey(Node id) {
Object key;
if (id instanceof Name) {
String s = ((Name)id).getIdentifier();
decompiler.addName(s);
key = ScriptRuntime.getIndexObject(s);
} else if (id instanceof StringLiteral) {
String s = ((StringLiteral)id).getValue();
decompiler.addString(s);
key = ScriptRuntime.getIndexObject(s);
} else if (id instanceof NumberLiteral) {
double n = ((NumberLiteral)id).getNumber();
decompiler.addNumber(n);
key = ScriptRuntime.getIndexObject(n);
} else {
throw Kit.codeBug();
}
return key;
}
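// Unwraps nested parentheses, emitting matching LP/RP decompiler tokens, and
// marks the transformed expression with PARENTHESIZED_PROP.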
private Node transformParenExpr(ParenthesizedExpression node) {
AstNode expr = node.getExpression();
decompiler.addToken(Token.LP);
int count = 1;
while (expr instanceof ParenthesizedExpression) {
decompiler.addToken(Token.LP);
count++;
expr = ((ParenthesizedExpression)expr).getExpression();
}
Node result = transform(expr);
for (int i = 0; i < count; i++) {
decompiler.addToken(Token.RP);
}
result.putProp(Node.PARENTHESIZED_PROP, Boolean.TRUE);
return result;
}
private Node transformPropertyGet(PropertyGet node) {
Node target = transform(node.getTarget());
String name = node.getProperty().getIdentifier();
decompiler.addToken(Token.DOT);
decompiler.addName(name);
return createPropertyGet(target, null, name, 0);
}
private Node transformRegExp(RegExpLiteral node) {
decompiler.addRegexp(node.getValue(), node.getFlags());
currentScriptOrFn.addRegExp(node);
return node;
}
private Node transformReturn(ReturnStatement node) {
if (Boolean.TRUE.equals(node.getProp(Node.EXPRESSION_CLOSURE_PROP))) {
decompiler.addName(" ");
} else {
decompiler.addToken(Token.RETURN);
}
AstNode rv = node.getReturnValue();
Node value = rv == null ? null : transform(rv);
decompiler.addEOL(Token.SEMI);
return rv == null
? new Node(Token.RETURN, node.getLineno())
: new Node(Token.RETURN, value, node.getLineno());
}
private Node transformScript(ScriptNode node) {
decompiler.addToken(Token.SCRIPT);
if (currentScope != null) Kit.codeBug();
currentScope = node;
Node body = new Node(Token.BLOCK);
for (Node kid : node) {
body.addChildToBack(transform((AstNode)kid));
}
node.removeChildren();
Node children = body.getFirstChild();
if (children != null) {
node.addChildrenToBack(children);
}
return node;
}
private Node transformString(StringLiteral node) {
decompiler.addString(node.getValue());
return Node.newString(node.getValue());
}
private Node transformSwitch(SwitchStatement node) {
// The switch will be rewritten from:
//
// switch (expr) {
// case test1: statements1;
// ...
// default: statementsDefault;
// ...
// case testN: statementsN;
// }
//
// to:
//
// {
// switch (expr) {
// case test1: goto label1;
// ...
// case testN: goto labelN;
// }
// goto labelDefault;
// label1:
// statements1;
// ...
// labelDefault:
// statementsDefault;
// ...
// labelN:
// statementsN;
// breakLabel:
// }
//
// where inside switch each "break;" without label will be replaced
// by "goto breakLabel".
//
// If the original switch does not have the default label, then
// after the switch the transformed code would contain this goto:
// goto breakLabel;
// instead of:
// goto labelDefault;
decompiler.addToken(Token.SWITCH);
decompiler.addToken(Token.LP);
Node switchExpr = transform(node.getExpression());
decompiler.addToken(Token.RP);
node.addChildToBack(switchExpr);
Node block = new Node(Token.BLOCK, node, node.getLineno());
decompiler.addEOL(Token.LC);
for (SwitchCase sc : node.getCases()) {
AstNode expr = sc.getExpression();
Node caseExpr = null;
if (expr != null) {
decompiler.addToken(Token.CASE);
caseExpr = transform(expr);
} else {
decompiler.addToken(Token.DEFAULT);
}
decompiler.addEOL(Token.COLON);
List<AstNode> stmts = sc.getStatements();
Node body = new Block();
if (stmts != null) {
for (AstNode kid : stmts) {
body.addChildToBack(transform(kid));
}
}
addSwitchCase(block, caseExpr, body);
}
decompiler.addEOL(Token.RC);
closeSwitch(block);
return block;
}
private Node transformThrow(ThrowStatement node) {
decompiler.addToken(Token.THROW);
Node value = transform(node.getExpression());
decompiler.addEOL(Token.SEMI);
return new Node(Token.THROW, value, node.getLineno());
}
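// Transforms the try block and each catch clause (with its optional
// condition) into CATCH nodes, then delegates to createTryCatchFinally.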
private Node transformTry(TryStatement node) {
decompiler.addToken(Token.TRY);
decompiler.addEOL(Token.LC);
Node tryBlock = transform(node.getTryBlock());
decompiler.addEOL(Token.RC);
Node catchBlocks = new Block();
for (CatchClause cc : node.getCatchClauses()) {
decompiler.addToken(Token.CATCH);
decompiler.addToken(Token.LP);
String varName = cc.getVarName().getIdentifier();
decompiler.addName(varName);
Node catchCond = null;
AstNode ccc = cc.getCatchCondition();
if (ccc != null) {
decompiler.addName(" ");
decompiler.addToken(Token.IF);
catchCond = transform(ccc);
} else {
catchCond = new EmptyExpression();
}
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(cc.getBody());
decompiler.addEOL(Token.RC);
catchBlocks.addChildToBack(createCatch(varName, catchCond,
body, cc.getLineno()));
}
Node finallyBlock = null;
if (node.getFinallyBlock() != null) {
decompiler.addToken(Token.FINALLY);
decompiler.addEOL(Token.LC);
finallyBlock = transform(node.getFinallyBlock());
decompiler.addEOL(Token.RC);
}
return createTryCatchFinally(tryBlock, catchBlocks,
finallyBlock, node.getLineno());
}
private Node transformUnary(UnaryExpression node) {
int type = node.getType();
if (type == Token.DEFAULTNAMESPACE) {
return transformDefaultXmlNamepace(node);
}
if (node.isPrefix()) {
decompiler.addToken(type);
}
Node child = transform(node.getOperand());
if (node.isPostfix()) {
decompiler.addToken(type);
}
if (type == Token.INC || type == Token.DEC) {
return createIncDec(type, node.isPostfix(), child);
}
return createUnary(type, child);
}
private Node transformVariables(VariableDeclaration node) {
decompiler.addToken(node.getType());
transformVariableInitializers(node);
// Might be most robust to have parser record whether it was
// a variable declaration statement, possibly as a node property.
AstNode parent = node.getParent();
if (!(parent instanceof Loop)
&& !(parent instanceof LetNode)) {
decompiler.addEOL(Token.SEMI);
}
return node;
}
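// Transforms each declared variable; destructuring targets are decompiled
// but not transformed here, becoming destructuring assignments when an
// initializer is present.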
private Node transformVariableInitializers(VariableDeclaration node) {
List<VariableInitializer> vars = node.getVariables();
int size = vars.size(), i = 0;
for (VariableInitializer var : vars) {
AstNode target = var.getTarget();
AstNode init = var.getInitializer();
Node left = null;
if (var.isDestructuring()) {
decompile(target); // decompile but don't transform
left = target;
} else {
left = transform(target);
}
Node right = null;
if (init != null) {
decompiler.addToken(Token.ASSIGN);
right = transform(init);
}
if (var.isDestructuring()) {
if (right == null) { // TODO: should this ever happen?
node.addChildToBack(left);
} else {
Node d = createDestructuringAssignment(node.getType(),
left, right);
node.addChildToBack(d);
}
} else {
if (right != null) {
left.addChildToBack(right);
}
node.addChildToBack(left);
}
if (i++ < size-1) {
decompiler.addToken(Token.COMMA);
}
}
return node;
}
private Node transformWhileLoop(WhileLoop loop) {
decompiler.addToken(Token.WHILE);
loop.setType(Token.LOOP);
pushScope(loop);
try {
decompiler.addToken(Token.LP);
Node cond = transform(loop.getCondition());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node body = transform(loop.getBody());
decompiler.addEOL(Token.RC);
return createLoop(loop, LOOP_WHILE, body, cond, null, null);
} finally {
popScope();
}
}
private Node transformWith(WithStatement node) {
decompiler.addToken(Token.WITH);
decompiler.addToken(Token.LP);
Node expr = transform(node.getExpression());
decompiler.addToken(Token.RP);
decompiler.addEOL(Token.LC);
Node stmt = transform(node.getStatement());
decompiler.addEOL(Token.RC);
return createWith(expr, stmt, node.getLineno());
}
private Node transformYield(Yield node) {
decompiler.addToken(Token.YIELD);
Node kid = node.getValue() == null ? null : transform(node.getValue());
if (kid != null)
return new Node(Token.YIELD, kid, node.getLineno());
else
return new Node(Token.YIELD, node.getLineno());
}
private Node transformXmlLiteral(XmlLiteral node) {
// a literal like <foo>{bar}</foo> is rewritten as
// new XML("<foo>" + bar + "</foo>");
Node pnXML = new Node(Token.NEW, node.getLineno());
List<XmlFragment> frags = node.getFragments();
XmlString first = (XmlString)frags.get(0);
boolean anon = first.getXml().trim().startsWith("<>");
pnXML.addChildToBack(createName(anon ? "XMLList" : "XML"));
Node pn = null;
for (XmlFragment frag : frags) {
if (frag instanceof XmlString) {
String xml = ((XmlString)frag).getXml();
decompiler.addName(xml);
if (pn == null) {
pn = createString(xml);
} else {
pn = createBinary(Token.ADD, pn, createString(xml));
}
} else {
XmlExpression xexpr = (XmlExpression)frag;
boolean isXmlAttr = xexpr.isXmlAttribute();
Node expr;
decompiler.addToken(Token.LC);
if (xexpr.getExpression() instanceof EmptyExpression) {
expr = createString("");
} else {
expr = transform(xexpr.getExpression());
}
decompiler.addToken(Token.RC);
if (isXmlAttr) {
// Need to put the result in double quotes
expr = createUnary(Token.ESCXMLATTR, expr);
Node prepend = createBinary(Token.ADD,
createString("\""),
expr);
expr = createBinary(Token.ADD,
prepend,
createString("\""));
} else {
expr = createUnary(Token.ESCXMLTEXT, expr);
}
pn = createBinary(Token.ADD, pn, expr);
}
}
pnXML.addChildToBack(pn);
return pnXML;
}
private Node transformXmlMemberGet(XmlMemberGet node) {
XmlRef ref = node.getMemberRef();
Node pn = transform(node.getLeft());
int flags = ref.isAttributeAccess() ? Node.ATTRIBUTE_FLAG : 0;
if (node.getType() == Token.DOTDOT) {
flags |= Node.DESCENDANTS_FLAG;
decompiler.addToken(Token.DOTDOT);
} else {
decompiler.addToken(Token.DOT);
}
return transformXmlRef(pn, ref, flags);
}
// We get here if we weren't a child of a . or .. infix node
private Node transformXmlRef(XmlRef node) {
int memberTypeFlags = node.isAttributeAccess()
? Node.ATTRIBUTE_FLAG : 0;
return transformXmlRef(null, node, memberTypeFlags);
}
private Node transformXmlRef(Node pn, XmlRef node, int memberTypeFlags) {
if ((memberTypeFlags & Node.ATTRIBUTE_FLAG) != 0)
decompiler.addToken(Token.XMLATTR);
Name namespace = node.getNamespace();
String ns = namespace != null ? namespace.getIdentifier() : null;
if (ns != null) {
decompiler.addName(ns);
decompiler.addToken(Token.COLONCOLON);
}
if (node instanceof XmlPropRef) {
String name = ((XmlPropRef)node).getPropName().getIdentifier();
decompiler.addName(name);
return createPropertyGet(pn, ns, name, memberTypeFlags);
} else {
decompiler.addToken(Token.LB);
Node expr = transform(((XmlElemRef)node).getExpression());
decompiler.addToken(Token.RB);
return createElementGet(pn, ns, expr, memberTypeFlags);
}
}
private Node transformDefaultXmlNamepace(UnaryExpression node) {
decompiler.addToken(Token.DEFAULT);
decompiler.addName(" xml");
decompiler.addName(" namespace");
decompiler.addToken(Token.ASSIGN);
Node child = transform(node.getOperand());
return createUnary(Token.DEFAULTNAMESPACE, child);
}
/**
* If caseExpression argument is null it indicates a default label.
*/
private void addSwitchCase(Node switchBlock, Node caseExpression,
Node statements)
{
if (switchBlock.getType() != Token.BLOCK) throw Kit.codeBug();
Jump switchNode = (Jump)switchBlock.getFirstChild();
if (switchNode.getType() != Token.SWITCH) throw Kit.codeBug();
Node gotoTarget = Node.newTarget();
if (caseExpression != null) {
Jump caseNode = new Jump(Token.CASE, caseExpression);
caseNode.target = gotoTarget;
switchNode.addChildToBack(caseNode);
} else {
switchNode.setDefault(gotoTarget);
}
switchBlock.addChildToBack(gotoTarget);
switchBlock.addChildToBack(statements);
}
private void closeSwitch(Node switchBlock)
{
if (switchBlock.getType() != Token.BLOCK) throw Kit.codeBug();
Jump switchNode = (Jump)switchBlock.getFirstChild();
if (switchNode.getType() != Token.SWITCH) throw Kit.codeBug();
Node switchBreakTarget = Node.newTarget();
// switchNode.target is only used by NodeTransformer
// to detect switch end
switchNode.target = switchBreakTarget;
Node defaultTarget = switchNode.getDefault();
if (defaultTarget == null) {
defaultTarget = switchBreakTarget;
}
switchBlock.addChildAfter(makeJump(Token.GOTO, defaultTarget),
switchNode);
switchBlock.addChildToBack(switchBreakTarget);
}
private Node createExprStatementNoReturn(Node expr, int lineno) {
return new Node(Token.EXPR_VOID, expr, lineno);
}
private Node createString(String string) {
return Node.newString(string);
}
/**
* Catch clause of try/catch/finally
* @param varName the name of the variable to bind to the exception
* @param catchCond the condition under which to catch the exception.
* May be null if no condition is given.
* @param stmts the statements in the catch clause
* @param lineno the starting line number of the catch clause
*/
private Node createCatch(String varName, Node catchCond, Node stmts,
int lineno) {
if (catchCond == null) {
catchCond = new Node(Token.EMPTY);
}
return new Node(Token.CATCH, createName(varName),
catchCond, stmts, lineno);
}
private Node initFunction(FunctionNode fnNode, int functionIndex,
Node statements, int functionType) {
fnNode.setFunctionType(functionType);
fnNode.addChildToBack(statements);
int functionCount = fnNode.getFunctionCount();
if (functionCount != 0) {
// Functions containing other functions require activation objects
fnNode.setRequiresActivation();
}
if (functionType == FunctionNode.FUNCTION_EXPRESSION) {
Name name = fnNode.getFunctionName();
if (name != null && name.length() != 0) {
// A function expression needs to have its name as a
// variable (if it isn't already allocated as a variable).
// See ECMA Ch. 13. We add code to the beginning of the
// function to initialize a local variable of the
// function's name to the function value.
Node setFn = new Node(Token.EXPR_VOID,
new Node(Token.SETNAME,
Node.newString(Token.BINDNAME,
name.getIdentifier()),
new Node(Token.THISFN)));
statements.addChildrenToFront(setFn);
}
}
// Add return to end if needed.
Node lastStmt = statements.getLastChild();
if (lastStmt == null || lastStmt.getType() != Token.RETURN) {
statements.addChildToBack(new Node(Token.RETURN));
}
Node result = Node.newString(Token.FUNCTION, fnNode.getName());
result.putIntProp(Node.FUNCTION_PROP, functionIndex);
return result;
}
/**
* Create loop node. The code generator will later call
* createWhile|createDoWhile|createFor|createForIn
* to finish loop generation.
*/
private Scope createLoopNode(Node loopLabel, int lineno) {
Scope result = createScopeNode(Token.LOOP, lineno);
if (loopLabel != null) {
((Jump)loopLabel).setLoop(result);
}
return result;
}
private Node createFor(Scope loop, Node init,
Node test, Node incr, Node body) {
if (init.getType() == Token.LET) {
// rewrite "for (let i=s; i < N; i++)..." as
// "let (i=s) { for (; i < N; i++)..." so that "s" is evaluated
// outside the scope of the for.
Scope let = Scope.splitScope(loop);
let.setType(Token.LET);
let.addChildrenToBack(init);
let.addChildToBack(createLoop(loop, LOOP_FOR, body, test,
new Node(Token.EMPTY), incr));
return let;
}
return createLoop(loop, LOOP_FOR, body, test, init, incr);
}
private Node createLoop(Jump loop, int loopType, Node body,
Node cond, Node init, Node incr)
{
Node bodyTarget = Node.newTarget();
Node condTarget = Node.newTarget();
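// Emitted layout: [init;] [GOTO condTarget;] bodyTarget: body; [incrTarget: incr;]
// condTarget: IFEQ(cond) -> bodyTarget; breakTarget:
// do..while omits the leading GOTO because its body runs before the first test.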
if (loopType == LOOP_FOR && cond.getType() == Token.EMPTY) {
cond = new Node(Token.TRUE);
}
Jump IFEQ = new Jump(Token.IFEQ, cond);
IFEQ.target = bodyTarget;
Node breakTarget = Node.newTarget();
loop.addChildToBack(bodyTarget);
loop.addChildrenToBack(body);
if (loopType == LOOP_WHILE || loopType == LOOP_FOR) {
// propagate lineno to condition
loop.addChildrenToBack(new Node(Token.EMPTY, loop.getLineno()));
}
loop.addChildToBack(condTarget);
loop.addChildToBack(IFEQ);
loop.addChildToBack(breakTarget);
loop.target = breakTarget;
Node continueTarget = condTarget;
if (loopType == LOOP_WHILE || loopType == LOOP_FOR) {
// while and for need an initial GOTO to the condition
// (do..while falls through to the body first)
loop.addChildToFront(makeJump(Token.GOTO, condTarget));
if (loopType == LOOP_FOR) {
int initType = init.getType();
if (initType != Token.EMPTY) {
if (initType != Token.VAR && initType != Token.LET) {
init = new Node(Token.EXPR_VOID, init);
}
loop.addChildToFront(init);
}
Node incrTarget = Node.newTarget();
loop.addChildAfter(incrTarget, body);
if (incr.getType() != Token.EMPTY) {
incr = new Node(Token.EXPR_VOID, incr);
loop.addChildAfter(incr, incrTarget);
}
continueTarget = incrTarget;
}
}
loop.setContinue(continueTarget);
return loop;
}
/**
* Generate IR for a for..in loop.
*/
private Node createForIn(int declType, Node loop, Node lhs,
Node obj, Node body, boolean isForEach)
{
int destructuring = -1;
int destructuringLen = 0;
Node lvalue;
int type = lhs.getType();
if (type == Token.VAR || type == Token.LET) {
Node kid = lhs.getLastChild();
int kidType = kid.getType();
if (kidType == Token.ARRAYLIT || kidType == Token.OBJECTLIT)
{
type = destructuring = kidType;
lvalue = kid;
destructuringLen = 0;
if (kid instanceof ArrayLiteral)
destructuringLen = ((ArrayLiteral) kid).getDestructuringLength();
} else if (kidType == Token.NAME) {
lvalue = Node.newString(Token.NAME, kid.getString());
} else {
reportError("msg.bad.for.in.lhs");
return null;
}
} else if (type == Token.ARRAYLIT || type == Token.OBJECTLIT) {
destructuring = type;
lvalue = lhs;
destructuringLen = 0;
if (lhs instanceof ArrayLiteral)
destructuringLen = ((ArrayLiteral) lhs).getDestructuringLength();
} else {
lvalue = makeReference(lhs);
if (lvalue == null) {
reportError("msg.bad.for.in.lhs");
return null;
}
}
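// Lower the loop to: LOCAL_BLOCK { [lhs decl;] ENUM_INIT_*(obj);
// while (ENUM_NEXT) { lvalue = ENUM_ID; body } }, with the enumerator
// state held in the local block created below.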
Node localBlock = new Node(Token.LOCAL_BLOCK);
int initType = isForEach ? Token.ENUM_INIT_VALUES
: (destructuring != -1
? Token.ENUM_INIT_ARRAY
: Token.ENUM_INIT_KEYS);
Node init = new Node(initType, obj);
init.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
Node cond = new Node(Token.ENUM_NEXT);
cond.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
Node id = new Node(Token.ENUM_ID);
id.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
Node newBody = new Node(Token.BLOCK);
Node assign;
if (destructuring != -1) {
assign = createDestructuringAssignment(declType, lvalue, id);
if (!isForEach && (destructuring == Token.OBJECTLIT ||
destructuringLen != 2))
{
// destructuring assignment is only allowed in for..each or
// with an array type of length 2 (to hold key and value)
reportError("msg.bad.for.in.destruct");
}
} else {
assign = simpleAssignment(lvalue, id);
}
newBody.addChildToBack(new Node(Token.EXPR_VOID, assign));
newBody.addChildToBack(body);
loop = createLoop((Jump)loop, LOOP_WHILE, newBody, cond, null, null);
loop.addChildToFront(init);
if (type == Token.VAR || type == Token.LET)
loop.addChildToFront(lhs);
localBlock.addChildToBack(loop);
return localBlock;
}
/**
* Try/Catch/Finally
*
* The IRFactory tries to express as much as possible in the tree;
* the responsibilities remaining for Codegen are to add the Java
* handlers: (Either (but not both) of TARGET and FINALLY might not
* be defined)
*
* - a catch handler for javascript exceptions that unwraps the
* exception onto the stack and GOTOes to the catch target
*
* - a finally handler
*
* ... and a GOTO to jump around these handlers.
*/
private Node createTryCatchFinally(Node tryBlock, Node catchBlocks,
Node finallyBlock, int lineno)
{
boolean hasFinally = (finallyBlock != null)
&& (finallyBlock.getType() != Token.BLOCK
|| finallyBlock.hasChildren());
// short circuit
if (tryBlock.getType() == Token.BLOCK && !tryBlock.hasChildren()
&& !hasFinally)
{
return tryBlock;
}
boolean hasCatch = catchBlocks.hasChildren();
// short circuit
if (!hasFinally && !hasCatch) {
// because finally might be an empty block...
return tryBlock;
}
Node handlerBlock = new Node(Token.LOCAL_BLOCK);
Jump pn = new Jump(Token.TRY, tryBlock, lineno);
pn.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
if (hasCatch) {
// jump around catch code
Node endCatch = Node.newTarget();
pn.addChildToBack(makeJump(Token.GOTO, endCatch));
// make a TARGET for the catch that the tcf node knows about
Node catchTarget = Node.newTarget();
pn.target = catchTarget;
// mark it
pn.addChildToBack(catchTarget);
//
// Given
//
// try {
// tryBlock;
// } catch (e if condition1) {
// something1;
// ...
//
// } catch (e if conditionN) {
// somethingN;
// } catch (e) {
// somethingDefault;
// }
//
// rewrite as
//
// try {
// tryBlock;
// goto after_catch:
// } catch (x) {
// with (newCatchScope(e, x)) {
// if (condition1) {
// something1;
// goto after_catch;
// }
// }
// ...
// with (newCatchScope(e, x)) {
// if (conditionN) {
// somethingN;
// goto after_catch;
// }
// }
// with (newCatchScope(e, x)) {
// somethingDefault;
// goto after_catch;
// }
// }
// after_catch:
//
// If there is no default catch, then the last with block
// around "somethingDefault;" is replaced by "rethrow;"
// It is assumed that catch handler generation will store the
// exception object in the handlerBlock register
// Block with local for exception scope objects
Node catchScopeBlock = new Node(Token.LOCAL_BLOCK);
// expects each child of catchBlocks to carry (name, cond, block) children.
Node cb = catchBlocks.getFirstChild();
boolean hasDefault = false;
int scopeIndex = 0;
while (cb != null) {
int catchLineNo = cb.getLineno();
Node name = cb.getFirstChild();
Node cond = name.getNext();
Node catchStatement = cond.getNext();
cb.removeChild(name);
cb.removeChild(cond);
cb.removeChild(catchStatement);
// Add goto to the catch statement to jump out of catch
// but prefix it with LEAVEWITH since try..catch produces
// "with"code in order to limit the scope of the exception
// object.
catchStatement.addChildToBack(new Node(Token.LEAVEWITH));
catchStatement.addChildToBack(makeJump(Token.GOTO, endCatch));
// Create condition "if" when present
Node condStmt;
if (cond.getType() == Token.EMPTY) {
condStmt = catchStatement;
hasDefault = true;
} else {
condStmt = createIf(cond, catchStatement, null,
catchLineNo);
}
// Generate code to create the scope object and store
// it in catchScopeBlock register
Node catchScope = new Node(Token.CATCH_SCOPE, name,
createUseLocal(handlerBlock));
catchScope.putProp(Node.LOCAL_BLOCK_PROP, catchScopeBlock);
catchScope.putIntProp(Node.CATCH_SCOPE_PROP, scopeIndex);
catchScopeBlock.addChildToBack(catchScope);
// Add with statement based on catch scope object
catchScopeBlock.addChildToBack(
createWith(createUseLocal(catchScopeBlock), condStmt,
catchLineNo));
// move to next cb
cb = cb.getNext();
++scopeIndex;
}
pn.addChildToBack(catchScopeBlock);
if (!hasDefault) {
// Generate code to rethrow if no catch clause was executed
Node rethrow = new Node(Token.RETHROW);
rethrow.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
pn.addChildToBack(rethrow);
}
pn.addChildToBack(endCatch);
}
if (hasFinally) {
Node finallyTarget = Node.newTarget();
pn.setFinally(finallyTarget);
// add jsr finally to the try block
pn.addChildToBack(makeJump(Token.JSR, finallyTarget));
// jump around finally code
Node finallyEnd = Node.newTarget();
pn.addChildToBack(makeJump(Token.GOTO, finallyEnd));
pn.addChildToBack(finallyTarget);
Node fBlock = new Node(Token.FINALLY, finallyBlock);
fBlock.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
pn.addChildToBack(fBlock);
pn.addChildToBack(finallyEnd);
}
handlerBlock.addChildToBack(pn);
return handlerBlock;
}
private Node createWith(Node obj, Node body, int lineno) {
setRequiresActivation();
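// with (obj) body lowers to: BLOCK { ENTERWITH obj; WITH body; LEAVEWITH }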
Node result = new Node(Token.BLOCK, lineno);
result.addChildToBack(new Node(Token.ENTERWITH, obj));
Node bodyNode = new Node(Token.WITH, body, lineno);
result.addChildrenToBack(bodyNode);
result.addChildToBack(new Node(Token.LEAVEWITH));
return result;
}
private Node createIf(Node cond, Node ifTrue, Node ifFalse, int lineno)
{
int condStatus = isAlwaysDefinedBoolean(cond);
if (condStatus == ALWAYS_TRUE_BOOLEAN) {
return ifTrue;
} else if (condStatus == ALWAYS_FALSE_BOOLEAN) {
if (ifFalse != null) {
return ifFalse;
}
// Replace if (false) xxx by empty block
return new Node(Token.BLOCK, lineno);
}
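// General case: IFNE jumps to ifNotTarget when the condition is false.
// Layout: IFNE(cond) -> ifNot; ifTrue; [GOTO end; ifNot: ifFalse; end:]
// or, with no else branch, just ifNot: at the end.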
Node result = new Node(Token.BLOCK, lineno);
Node ifNotTarget = Node.newTarget();
Jump IFNE = new Jump(Token.IFNE, cond);
IFNE.target = ifNotTarget;
result.addChildToBack(IFNE);
result.addChildrenToBack(ifTrue);
if (ifFalse != null) {
Node endTarget = Node.newTarget();
result.addChildToBack(makeJump(Token.GOTO, endTarget));
result.addChildToBack(ifNotTarget);
result.addChildrenToBack(ifFalse);
result.addChildToBack(endTarget);
} else {
result.addChildToBack(ifNotTarget);
}
return result;
}
private Node createCondExpr(Node cond, Node ifTrue, Node ifFalse) {
int condStatus = isAlwaysDefinedBoolean(cond);
if (condStatus == ALWAYS_TRUE_BOOLEAN) {
return ifTrue;
} else if (condStatus == ALWAYS_FALSE_BOOLEAN) {
return ifFalse;
}
return new Node(Token.HOOK, cond, ifTrue, ifFalse);
}
private Node createUnary(int nodeType, Node child)
{
int childType = child.getType();
switch (nodeType) {
case Token.DELPROP: {
Node n;
if (childType == Token.NAME) {
// Transform Delete(Name "a")
// to Delete(Bind("a"), String("a"))
child.setType(Token.BINDNAME);
Node left = child;
Node right = Node.newString(child.getString());
n = new Node(nodeType, left, right);
} else if (childType == Token.GETPROP ||
childType == Token.GETELEM)
{
Node left = child.getFirstChild();
Node right = child.getLastChild();
child.removeChild(left);
child.removeChild(right);
n = new Node(nodeType, left, right);
} else if (childType == Token.GET_REF) {
Node ref = child.getFirstChild();
child.removeChild(ref);
n = new Node(Token.DEL_REF, ref);
} else {
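// Any other operand: delete simply yields true; note the operand expression
// is dropped here rather than evaluated first (the behavior that bug 508547,
// cited in this record's commit message, is about).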
n = new Node(Token.TRUE);
}
return n;
}
case Token.TYPEOF:
if (childType == Token.NAME) {
child.setType(Token.TYPEOFNAME);
return child;
}
break;
case Token.BITNOT:
if (childType == Token.NUMBER) {
int value = ScriptRuntime.toInt32(child.getDouble());
child.setDouble(~value);
return child;
}
break;
case Token.NEG:
if (childType == Token.NUMBER) {
child.setDouble(-child.getDouble());
return child;
}
break;
case Token.NOT: {
int status = isAlwaysDefinedBoolean(child);
if (status != 0) {
int type;
if (status == ALWAYS_TRUE_BOOLEAN) {
type = Token.FALSE;
} else {
type = Token.TRUE;
}
if (childType == Token.TRUE || childType == Token.FALSE) {
child.setType(type);
return child;
}
return new Node(type);
}
break;
}
}
return new Node(nodeType, child);
}
private Node createCallOrNew(int nodeType, Node child) {
int type = Node.NON_SPECIALCALL;
if (child.getType() == Token.NAME) {
String name = child.getString();
if (name.equals("eval")) {
type = Node.SPECIALCALL_EVAL;
} else if (name.equals("With")) {
type = Node.SPECIALCALL_WITH;
}
} else if (child.getType() == Token.GETPROP) {
String name = child.getLastChild().getString();
if (name.equals("eval")) {
type = Node.SPECIALCALL_EVAL;
}
}
Node node = new Node(nodeType, child);
if (type != Node.NON_SPECIALCALL) {
// Calls to these functions require activation objects.
setRequiresActivation();
node.putIntProp(Node.SPECIALCALL_PROP, type);
}
return node;
}
private Node createIncDec(int nodeType, boolean post, Node child)
{
child = makeReference(child);
int childType = child.getType();
switch (childType) {
case Token.NAME:
case Token.GETPROP:
case Token.GETELEM:
case Token.GET_REF: {
Node n = new Node(nodeType, child);
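// Record whether this is a decrement and whether it is postfix in a bit
// mask stored as INCRDECR_PROP on the node for the code generator.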
int incrDecrMask = 0;
if (nodeType == Token.DEC) {
incrDecrMask |= Node.DECR_FLAG;
}
if (post) {
incrDecrMask |= Node.POST_FLAG;
}
n.putIntProp(Node.INCRDECR_PROP, incrDecrMask);
return n;
}
}
throw Kit.codeBug();
}
private Node createPropertyGet(Node target, String namespace, String name,
int memberTypeFlags)
{
if (namespace == null && memberTypeFlags == 0) {
if (target == null) {
return createName(name);
}
checkActivationName(name, Token.GETPROP);
if (ScriptRuntime.isSpecialProperty(name)) {
Node ref = new Node(Token.REF_SPECIAL, target);
ref.putProp(Node.NAME_PROP, name);
return new Node(Token.GET_REF, ref);
}
return new Node(Token.GETPROP, target, Node.newString(name));
}
Node elem = Node.newString(name);
memberTypeFlags |= Node.PROPERTY_FLAG;
return createMemberRefGet(target, namespace, elem, memberTypeFlags);
}
/**
* @param target the node before the LB
* @param namespace optional namespace
* @param elem the node in the brackets
* @param memberTypeFlags E4X flags
*/
private Node createElementGet(Node target, String namespace, Node elem,
int memberTypeFlags)
{
// OPT: could optimize to createPropertyGet
// iff elem is a string that cannot be a number
if (namespace == null && memberTypeFlags == 0) {
// stand-alone [aaa] as primary expression is array literal
// declaration and should not come here!
if (target == null) throw Kit.codeBug();
return new Node(Token.GETELEM, target, elem);
}
return createMemberRefGet(target, namespace, elem, memberTypeFlags);
}
private Node createMemberRefGet(Node target, String namespace, Node elem,
int memberTypeFlags)
{
Node nsNode = null;
if (namespace != null) {
// See 11.1.2 in ECMA 357
if (namespace.equals("*")) {
nsNode = new Node(Token.NULL);
} else {
nsNode = createName(namespace);
}
}
Node ref;
if (target == null) {
if (namespace == null) {
ref = new Node(Token.REF_NAME, elem);
} else {
ref = new Node(Token.REF_NS_NAME, nsNode, elem);
}
} else {
if (namespace == null) {
ref = new Node(Token.REF_MEMBER, target, elem);
} else {
ref = new Node(Token.REF_NS_MEMBER, target, nsNode, elem);
}
}
if (memberTypeFlags != 0) {
ref.putIntProp(Node.MEMBER_TYPE_PROP, memberTypeFlags);
}
return new Node(Token.GET_REF, ref);
}
private Node createBinary(int nodeType, Node left, Node right) {
switch (nodeType) {
case Token.ADD:
// numerical addition and string concatenation
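// Constants are folded at parse time, e.g. "a" + 1 becomes "a1" and 2 + 3 becomes 5.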
if (left.type == Token.STRING) {
String s2;
if (right.type == Token.STRING) {
s2 = right.getString();
} else if (right.type == Token.NUMBER) {
s2 = ScriptRuntime.numberToString(right.getDouble(), 10);
} else {
break;
}
String s1 = left.getString();
left.setString(s1.concat(s2));
return left;
} else if (left.type == Token.NUMBER) {
if (right.type == Token.NUMBER) {
left.setDouble(left.getDouble() + right.getDouble());
return left;
} else if (right.type == Token.STRING) {
String s1, s2;
s1 = ScriptRuntime.numberToString(left.getDouble(), 10);
s2 = right.getString();
right.setString(s1.concat(s2));
return right;
}
}
// can't do anything if we don't know both types - since
// 0 + object is supposed to call toString on the object and do
// string concatenation rather than addition
break;
case Token.SUB:
// numerical subtraction
if (left.type == Token.NUMBER) {
double ld = left.getDouble();
if (right.type == Token.NUMBER) {
//both numbers
left.setDouble(ld - right.getDouble());
return left;
} else if (ld == 0.0) {
// first 0: 0-x -> -x
return new Node(Token.NEG, right);
}
} else if (right.type == Token.NUMBER) {
if (right.getDouble() == 0.0) {
//second 0: x - 0 -> +x
// cannot simply use x because x - 0 must be a number
return new Node(Token.POS, left);
}
}
break;
case Token.MUL:
// numerical multiplication
if (left.type == Token.NUMBER) {
double ld = left.getDouble();
if (right.type == Token.NUMBER) {
//both numbers
left.setDouble(ld * right.getDouble());
return left;
} else if (ld == 1.0) {
// first 1: 1 * x -> +x
return new Node(Token.POS, right);
}
} else if (right.type == Token.NUMBER) {
if (right.getDouble() == 1.0) {
//second 1: x * 1 -> +x
// cannot simply use x because x * 1 must be a number
return new Node(Token.POS, left);
}
}
// can't do x*0: Infinity * 0 gives NaN, not 0
break;
case Token.DIV:
// number division
if (right.type == Token.NUMBER) {
double rd = right.getDouble();
if (left.type == Token.NUMBER) {
// both constants -- just divide, trust Java to handle x/0
left.setDouble(left.getDouble() / rd);
return left;
} else if (rd == 1.0) {
// second 1: x/1 -> +x
// not simply x, to force number conversion
return new Node(Token.POS, left);
}
}
break;
case Token.AND: {
// Since x && y gives x, not false, when Boolean(x) is false,
// and y, not Boolean(y), when Boolean(x) is true, x && y
// can only be simplified if x is defined. See bug 309957.
int leftStatus = isAlwaysDefinedBoolean(left);
if (leftStatus == ALWAYS_FALSE_BOOLEAN) {
// if the first one is false, just return it
return left;
} else if (leftStatus == ALWAYS_TRUE_BOOLEAN) {
// if first is true, set to second
return right;
}
break;
}
case Token.OR: {
// Since x || y gives x, not true, when Boolean(x) is true,
// and y, not Boolean(y), when Boolean(x) is false, x || y
// can only be simplified if x is defined. See bug 309957.
int leftStatus = isAlwaysDefinedBoolean(left);
if (leftStatus == ALWAYS_TRUE_BOOLEAN) {
// if the first one is true, just return it
return left;
} else if (leftStatus == ALWAYS_FALSE_BOOLEAN) {
// if first is false, set to second
return right;
}
break;
}
}
return new Node(nodeType, left, right);
}
private Node createAssignment(int assignType, Node left, Node right)
{
Node ref = makeReference(left);
if (ref == null) {
if (left.getType() == Token.ARRAYLIT ||
left.getType() == Token.OBJECTLIT)
{
if (assignType != Token.ASSIGN) {
reportError("msg.bad.destruct.op");
return right;
}
return createDestructuringAssignment(-1, left, right);
}
reportError("msg.bad.assign.left");
return right;
}
left = ref;
int assignOp;
switch (assignType) {
case Token.ASSIGN:
return simpleAssignment(left, right);
case Token.ASSIGN_BITOR: assignOp = Token.BITOR; break;
case Token.ASSIGN_BITXOR: assignOp = Token.BITXOR; break;
case Token.ASSIGN_BITAND: assignOp = Token.BITAND; break;
case Token.ASSIGN_LSH: assignOp = Token.LSH; break;
case Token.ASSIGN_RSH: assignOp = Token.RSH; break;
case Token.ASSIGN_URSH: assignOp = Token.URSH; break;
case Token.ASSIGN_ADD: assignOp = Token.ADD; break;
case Token.ASSIGN_SUB: assignOp = Token.SUB; break;
case Token.ASSIGN_MUL: assignOp = Token.MUL; break;
case Token.ASSIGN_DIV: assignOp = Token.DIV; break;
case Token.ASSIGN_MOD: assignOp = Token.MOD; break;
default: throw Kit.codeBug();
}
int nodeType = left.getType();
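// Compound assignments are expanded according to the lvalue kind, e.g.
//   a += b    =>  SETNAME(BINDNAME "a", ADD(NAME a, b))
//   o.p *= b  =>  SETPROP_OP(o, "p", MUL(USE_STACK, b))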
switch (nodeType) {
case Token.NAME: {
Node op = new Node(assignOp, left, right);
Node lvalueLeft = Node.newString(Token.BINDNAME, left.getString());
return new Node(Token.SETNAME, lvalueLeft, op);
}
case Token.GETPROP:
case Token.GETELEM: {
Node obj = left.getFirstChild();
Node id = left.getLastChild();
int type = nodeType == Token.GETPROP
? Token.SETPROP_OP
: Token.SETELEM_OP;
Node opLeft = new Node(Token.USE_STACK);
Node op = new Node(assignOp, opLeft, right);
return new Node(type, obj, id, op);
}
case Token.GET_REF: {
ref = left.getFirstChild();
checkMutableReference(ref);
Node opLeft = new Node(Token.USE_STACK);
Node op = new Node(assignOp, opLeft, right);
return new Node(Token.SET_REF_OP, ref, op);
}
}
throw Kit.codeBug();
}
private Node createUseLocal(Node localBlock) {
if (Token.LOCAL_BLOCK != localBlock.getType()) throw Kit.codeBug();
Node result = new Node(Token.LOCAL_LOAD);
result.putProp(Node.LOCAL_BLOCK_PROP, localBlock);
return result;
}
private Jump makeJump(int type, Node target) {
Jump n = new Jump(type);
n.target = target;
return n;
}
private Node makeReference(Node node) {
int type = node.getType();
switch (type) {
case Token.NAME:
case Token.GETPROP:
case Token.GETELEM:
case Token.GET_REF:
return node;
case Token.CALL:
node.setType(Token.REF_CALL);
return new Node(Token.GET_REF, node);
}
// Signal caller to report error
return null;
}
// Check if a Node always means true or false in a boolean context
private static int isAlwaysDefinedBoolean(Node node) {
switch (node.getType()) {
case Token.FALSE:
case Token.NULL:
return ALWAYS_FALSE_BOOLEAN;
case Token.TRUE:
return ALWAYS_TRUE_BOOLEAN;
case Token.NUMBER: {
double num = node.getDouble();
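// num == num filters out NaN (NaN != NaN), so NaN counts as always-false.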
if (num == num && num != 0.0) {
return ALWAYS_TRUE_BOOLEAN;
} else {
return ALWAYS_FALSE_BOOLEAN;
}
}
}
return 0;
}
// Check if node is the target of a destructuring bind.
boolean isDestructuring(Node n) {
return n instanceof DestructuringForm
&& ((DestructuringForm)n).isDestructuring();
}
Node decompileFunctionHeader(FunctionNode fn) {
Node mexpr = null;
if (fn.getFunctionName() != null) {
decompiler.addName(fn.getName());
} else if (fn.getMemberExprNode() != null) {
mexpr = transform(fn.getMemberExprNode());
}
decompiler.addToken(Token.LP);
List<AstNode> params = fn.getParams();
for (int i = 0; i < params.size(); i++) {
decompile(params.get(i));
if (i < params.size() - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RP);
if (!fn.isExpressionClosure()) {
decompiler.addEOL(Token.LC);
}
return mexpr;
}
void decompile(AstNode node) {
switch (node.getType()) {
case Token.ARRAYLIT:
decompileArrayLiteral((ArrayLiteral)node);
break;
case Token.OBJECTLIT:
decompileObjectLiteral((ObjectLiteral)node);
break;
case Token.STRING:
decompiler.addString(((StringLiteral)node).getValue());
break;
case Token.NAME:
decompiler.addName(((Name)node).getIdentifier());
break;
case Token.NUMBER:
decompiler.addNumber(((NumberLiteral)node).getNumber());
break;
case Token.GETPROP:
decompilePropertyGet((PropertyGet)node);
break;
case Token.EMPTY:
break;
case Token.GETELEM:
decompileElementGet((ElementGet) node);
break;
default:
Kit.codeBug("unexpected token: "
+ Token.typeToName(node.getType()));
}
}
// used for destructuring forms, since we don't transform() them
void decompileArrayLiteral(ArrayLiteral node) {
decompiler.addToken(Token.LB);
List<AstNode> elems = node.getElements();
int size = elems.size();
for (int i = 0; i < size; i++) {
AstNode elem = elems.get(i);
decompile(elem);
if (i < size - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RB);
}
// only used for destructuring forms
void decompileObjectLiteral(ObjectLiteral node) {
decompiler.addToken(Token.LC);
List<ObjectProperty> props = node.getElements();
int size = props.size();
for (int i = 0; i < size; i++) {
ObjectProperty prop = props.get(i);
boolean destructuringShorthand =
Boolean.TRUE.equals(prop.getProp(Node.DESTRUCTURING_SHORTHAND));
decompile(prop.getLeft());
if (!destructuringShorthand) {
decompiler.addToken(Token.COLON);
decompile(prop.getRight());
}
if (i < size - 1) {
decompiler.addToken(Token.COMMA);
}
}
decompiler.addToken(Token.RC);
}
// only used for destructuring forms
void decompilePropertyGet(PropertyGet node) {
decompile(node.getTarget());
decompiler.addToken(Token.DOT);
decompile(node.getProperty());
}
// only used for destructuring forms
void decompileElementGet(ElementGet node) {
decompile(node.getTarget());
decompiler.addToken(Token.LB);
decompile(node.getElement());
decompiler.addToken(Token.RB);
}
}
|
Fix bug 508547 - delete statements should evaluate functions before attempting to delete the result
Patch from Raphael Speyer
|
src/org/mozilla/javascript/IRFactory.java
|
Fix bug 508547 - delete statements should evaluate functions before attempting to delete the result Patch from Raphael Speyer
|
|
Java
|
agpl-3.0
|
b70a2e2e3f07cb48779de2edb4aaf3cce9ea1524
| 0
|
VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb,VoltDB/voltdb
|
/* This file is part of VoltDB.
* Copyright (C) 2019 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.task;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Test;
import org.voltdb.LocalClustersTestBase;
import org.voltdb.VoltTable;
import org.voltdb.client.Client;
import org.voltdb.client.NoConnectionsException;
import org.voltdb.client.ProcCallException;
import org.voltdb.compiler.VoltProjectBuilder;
public class TestTasksEnd2End extends LocalClustersTestBase {
private static final String s_userName = "TestUser";
private final VoltProjectBuilder m_builder = new VoltProjectBuilder();
@Override
public void setUp() throws Exception {
super.setUp();
String roleName = "TestRole";
m_builder.addRoles(new VoltProjectBuilder.RoleInfo[] {
new VoltProjectBuilder.RoleInfo(roleName, true, true, true, true, true, true) });
m_builder.addUsers(new VoltProjectBuilder.UserInfo[] {
new VoltProjectBuilder.UserInfo(s_userName, "password", new String[] { roleName }) });
configureClustersAndClients(Collections.singletonList(new ClusterConfiguration(4, 3, 1, m_builder)), 2, 2);
}
@After
public void cleanUpSchedules() {
try {
Client client = getClient(0);
VoltTable table = client.callProcedure("@SystemCatalog", "TASKS").getResults()[0];
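// Build one batch of DROP TASK statements for every task still listed in
// the system catalog so each test starts from a clean slate.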
StringBuilder sb = new StringBuilder();
while (table.advanceRow()) {
sb.append("DROP TASK ").append(table.getString("TASK_NAME")).append(';');
}
if (sb.length() != 0) {
client.callProcedure("@AdHoc", sb.toString());
}
} catch (Exception e) {
e.printStackTrace();
}
}
/*
* Test schedules running on the system.
*
* Create 3 schedules which insert a summary row into a table at different intervals
*/
@Test
public void systemSchedules() throws Exception {
Client client = getClient(0);
String summaryTable = getTableName(3, TableType.REPLICATED);
client.callProcedure("@AdHoc", "CREATE TABLE " + summaryTable
+ " (date TIMESTAMP NOT NULL, id INT NOT NULL, cnt BIGINT NOT NULL, key_sum DECIMAL NOT NULL, value_sum DECIMAL NOT NULL);");
AtomicReference<Exception> error = new AtomicReference<>();
Thread producer = new Thread() {
@Override
public void run() {
try {
while (true) {
insertRandomRows(client, 0, TableType.REPLICATED, 5);
Thread.sleep(1);
}
} catch (InterruptedException e) {
return;
} catch (Exception e) {
error.set(e);
}
};
};
producer.start();
String schedule1 = getMethodName() + "_1";
String schedule2 = getMethodName() + "_2";
String schedule3 = getMethodName() + "_3";
client.callProcedure("@AdHoc",
"CREATE PROCEDURE " + getMethodName() + " AS INSERT INTO " + summaryTable
+ " SELECT NOW, CAST(? AS INT), COUNT(*), SUM(CAST(key as DECIMAL)), SUM(CAST(value AS DECIMAL)) FROM "
+ getTableName(0, TableType.REPLICATED) + ';');
String summaryFormat = "CREATE TASK %s ON SCHEDULE %s PROCEDURE " + getMethodName()
+ " WITH (%d) ON ERROR IGNORE;";
client.callProcedure("@AdHoc", String.format(summaryFormat, schedule1, "delay 50 MILLISECONDS", 1));
client.callProcedure("@AdHoc", String.format(summaryFormat, schedule2, "CRON * * * * * *", 2));
client.callProcedure("@AdHoc", String.format(summaryFormat, schedule3, "EVERY 75 milliseconds", 3));
// Give everything some time to run
Thread.sleep(1000);
producer.interrupt();
producer.join();
VoltTable table = getTaskStats(client);
assertEquals(3, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
client.callProcedure("@AdHoc", "ALTER TASK " + schedule1 + " DISABLE; ALTER TASK " + schedule2
+ " DISABLE; ALTER TASK " + schedule3 + " DISABLE;");
Thread.sleep(5);
table = getTaskStats(client);
assertEquals(3, table.getRowCount());
while (table.advanceRow()) {
String scheduleName = table.getString("NAME");
int id = -1;
if (schedule1.equalsIgnoreCase(scheduleName)) {
id = 1;
} else if (schedule2.equalsIgnoreCase(scheduleName)) {
id = 2;
} else if (schedule3.equalsIgnoreCase(scheduleName)) {
id = 3;
} else {
fail("Unknown schedule " + scheduleName);
}
assertEquals("DISABLED", table.getString("STATE"));
long summaryCount = client
.callProcedure("@AdHoc", "SELECT COUNT(*) FROM " + summaryTable + " WHERE id = " + id + ";")
.getResults()[0].asScalarLong();
long procedureInvocations = table.getLong("PROCEDURE_INVOCATIONS");
long successfulProcedureInvocations = procedureInvocations - table.getLong("PROCEDURE_FAILURES");
/*
* There can be one extra invocation since stats are recorded when the result comes back and a schedule can be
* stopped after the procedure is called but before the result comes back
*/
assertTrue(
"Summary table has " + summaryCount + " rows. Invocation count is " + successfulProcedureInvocations
+ " for " + scheduleName,
summaryCount == successfulProcedureInvocations
|| summaryCount == successfulProcedureInvocations + 1);
long schedulerInvocations = table.getLong("SCHEDULER_INVOCATIONS");
assertTrue(
schedulerInvocations == procedureInvocations || schedulerInvocations == procedureInvocations + 1);
}
}
/*
* Test a schedule which runs on the partitions
*
* Create a simple schedule which prunes each partition down to 10 entries
*
* Test that partition failover works correctly
*/
@Test
public void partitionsSchedules() throws Exception {
Client client = getClient(0);
String tableName = getTableName(0, TableType.PARTITIONED);
String procName = getMethodName() + "_prune";
client.callProcedure("@AdHoc",
"CREATE PROCEDURE " + procName + " PARTITIONED AS DELETE FROM " + tableName
+ " ORDER BY key OFFSET 10");
AtomicReference<Exception> error = new AtomicReference<>();
Thread producer = new Thread() {
@Override
public void run() {
try {
while (true) {
insertRandomRows(client, 0, TableType.PARTITIONED, 10);
Thread.sleep(1);
}
} catch (InterruptedException e) {
return;
} catch (Exception e) {
error.set(e);
}
};
};
producer.start();
String schedule = getMethodName();
client.callProcedure("@AdHoc",
"CREATE TASK " + schedule + " ON SCHEDULE DELAY 10 MILLISECONDS PROCEDURE " + procName
+ " RUN ON PARTITIONS");
// Give everything some time to run
Thread.sleep(1000);
producer.interrupt();
producer.join();
VoltTable table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
Thread.sleep(15);
client.callProcedure("@AdHoc", "ALTER TASK " + schedule + " DISABLE;");
Thread.sleep(5);
table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("DISABLED", table.getString("STATE"));
assertTrue("Scheduler invocation count is lower than expected: " + table.getLong("SCHEDULER_INVOCATIONS"),
table.getLong("SCHEDULER_INVOCATIONS") >= 50);
assertTrue("Procedure invocation count is lower than expected: " + table.getLong("PROCEDURE_INVOCATIONS"),
table.getLong("PROCEDURE_INVOCATIONS") >= 50);
}
assertEquals(60,
client.callProcedure("@AdHoc", "SELECT COUNT(*) FROM " + tableName).getResults()[0].asScalarLong());
// Test that partition schedules fail over
getCluster(0).killSingleHost(1);
table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("DISABLED", table.getString("STATE"));
}
client.callProcedure("@AdHoc", "ALTER TASK " + schedule + " ENABLE;");
table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
try {
client.callProcedure("@AdHoc", "DROP PROCEDURE " + procName);
fail("Should not have been able to drop: " + procName);
} catch (ProcCallException e) {
String status = e.getClientResponse().getStatusString();
assertTrue(status, status.contains("Procedure does not exist: " + procName));
}
}
@Test
public void customScheduler() throws Exception {
Client client = getClient(0);
client.callProcedure("@AdHoc",
"CREATE TASK " + getMethodName() + " FROM CLASS " + CustomScheduler.class.getName()
+ " WITH (5, NULL);");
Thread.sleep(1000);
VoltTable table = getTaskStats(client);
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
assertNull(table.getString("SCHEDULER_STATUS"));
}
client.callProcedure("@AdHoc", "DROP TASK " + getMethodName());
client.callProcedure("@AdHoc",
"CREATE TASK " + getMethodName() + " FROM CLASS " + CustomScheduler.class.getName()
+ " WITH (5, 'STATUS');");
table = getTaskStats(client);
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
assertEquals("STATUS", table.getString("SCHEDULER_STATUS"));
}
}
/*
* Test creating tasks with AS USER clause
*/
@Test
public void tasksAsUser() throws Exception {
Client client = getClient(0);
String tableName = getTableName(0, TableType.PARTITIONED);
String procName = getMethodName() + "_prune";
client.callProcedure("@AdHoc",
"CREATE PROCEDURE " + procName + " PARTITIONED AS DELETE FROM " + tableName
+ " ORDER BY key OFFSET 10");
try {
client.callProcedure("@AdHoc",
"CREATE TASK " + getMethodName() + " ON SCHEDULE DELAY 5 MILLISECONDS PROCEDURE " + procName
+ " RUN ON PARTITIONS AS USER BOGUS_USER;");
fail("Should have failed to create a task with a bogus user");
} catch (ProcCallException e) {}
client.callProcedure("@AdHoc", "CREATE TASK " + getMethodName() + " ON SCHEDULE DELAY 5 MILLISECONDS PROCEDURE "
+ procName + " RUN ON PARTITIONS AS USER " + s_userName + ";");
Thread.sleep(500);
VoltTable table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
m_builder.clearUsers();
try {
getCluster(0).updateCatalog(m_builder);
fail("Should have failed to update the catalog since the user is in use");
} catch (ProcCallException e) {}
}
private static VoltTable getTaskStats(Client client)
throws NoConnectionsException, IOException, ProcCallException {
return client.callProcedure("@Statistics", "TASK", 0).getResults()[0];
}
public static class CustomScheduler implements ActionScheduler {
private int m_delayMs;
private String m_status;
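// Called with the parameters given in the task's WITH (...) clause:
// a callback delay in milliseconds and an optional status message.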
public void initialize(int delayMs, String status) {
m_delayMs = delayMs;
m_status = status;
}
@Override
public DelayedAction getFirstDelayedAction() {
return getNextAction(null);
}
public DelayedAction getNextAction(ActionResult result) {
return DelayedAction.createCallback(m_delayMs, TimeUnit.MILLISECONDS, this::getNextAction)
.setStatusMessage(m_status);
}
}
}
|
tests/frontend/org/voltdb/task/TestTasksEnd2End.java
|
/* This file is part of VoltDB.
* Copyright (C) 2019 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.voltdb.task;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.After;
import org.junit.Test;
import org.voltdb.LocalClustersTestBase;
import org.voltdb.VoltTable;
import org.voltdb.client.Client;
import org.voltdb.client.NoConnectionsException;
import org.voltdb.client.ProcCallException;
import org.voltdb.compiler.VoltProjectBuilder;
public class TestTasksEnd2End extends LocalClustersTestBase {
private static final String s_userName = "TestUser";
private final VoltProjectBuilder m_builder = new VoltProjectBuilder();
@Override
public void setUp() throws Exception {
super.setUp();
String roleName = "TestRole";
m_builder.addRoles(new VoltProjectBuilder.RoleInfo[] {
new VoltProjectBuilder.RoleInfo(roleName, true, true, true, true, true, true) });
m_builder.addUsers(new VoltProjectBuilder.UserInfo[] {
new VoltProjectBuilder.UserInfo(s_userName, "password", new String[] { roleName }) });
configureClustersAndClients(Collections.singletonList(new ClusterConfiguration(4, 3, 1, m_builder)), 2, 2);
}
@After
public void cleanUpSchedules() {
try {
Client client = getClient(0);
VoltTable table = client.callProcedure("@SystemCatalog", "TASKS").getResults()[0];
StringBuilder sb = new StringBuilder();
while (table.advanceRow()) {
sb.append("DROP TASK ").append(table.getString("TASK_NAME")).append(';');
}
if (sb.length() != 0) {
client.callProcedure("@AdHoc", sb.toString());
}
} catch (Exception e) {
e.printStackTrace();
}
}
/*
* Test schedules running on the system.
*
* Create 3 schedules which insert a summary row into a table at different intervals
*/
@Test
public void systemSchedules() throws Exception {
Client client = getClient(0);
String summaryTable = getTableName(3, TableType.REPLICATED);
client.callProcedure("@AdHoc", "CREATE TABLE " + summaryTable
+ " (date TIMESTAMP NOT NULL, id INT NOT NULL, cnt BIGINT NOT NULL, key_sum DECIMAL NOT NULL, value_sum DECIMAL NOT NULL);");
AtomicReference<Exception> error = new AtomicReference<>();
Thread producer = new Thread() {
@Override
public void run() {
try {
while (true) {
insertRandomRows(client, 0, TableType.REPLICATED, 5);
Thread.sleep(1);
}
} catch (InterruptedException e) {
return;
} catch (Exception e) {
error.set(e);
}
};
};
producer.start();
String schedule1 = getMethodName() + "_1";
String schedule2 = getMethodName() + "_2";
String schedule3 = getMethodName() + "_3";
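// The summary statement is issued through @AdHoc, with the INSERT ... SELECT
// embedded directly in each task's WITH clause (the newer version of this
// test replaces it with a dedicated procedure, per the commit message).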
String summaryFormat = "CREATE TASK %s ON SCHEDULE %s PROCEDURE @AdHoc WITH ('INSERT INTO " + summaryTable
+ " SELECT NOW, %d, COUNT(*), SUM(CAST(key as DECIMAL)), SUM(CAST(value AS DECIMAL)) FROM "
+ getTableName(0, TableType.REPLICATED) + ";') ON ERROR IGNORE;";
client.callProcedure("@AdHoc", String.format(summaryFormat, schedule1, "delay 50 MILLISECONDS", 1));
client.callProcedure("@AdHoc", String.format(summaryFormat, schedule2, "CRON * * * * * *", 2));
client.callProcedure("@AdHoc", String.format(summaryFormat, schedule3, "EVERY 75 milliseconds", 3));
// Give everything some time to run
Thread.sleep(1000);
producer.interrupt();
producer.join();
VoltTable table = getTaskStats(client);
assertEquals(3, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
client.callProcedure("@AdHoc", "ALTER TASK " + schedule1 + " DISABLE; ALTER TASK " + schedule2
+ " DISABLE; ALTER TASK " + schedule3 + " DISABLE;");
Thread.sleep(5);
table = getTaskStats(client);
assertEquals(3, table.getRowCount());
while (table.advanceRow()) {
String scheduleName = table.getString("NAME");
int id = -1;
if (schedule1.equalsIgnoreCase(scheduleName)) {
id = 1;
} else if (schedule2.equalsIgnoreCase(scheduleName)) {
id = 2;
} else if (schedule3.equalsIgnoreCase(scheduleName)) {
id = 3;
} else {
fail("Unknown schedule " + scheduleName);
}
assertEquals("DISABLED", table.getString("STATE"));
long summaryCount = client
.callProcedure("@AdHoc", "SELECT COUNT(*) FROM " + summaryTable + " WHERE id = " + id + ";")
.getResults()[0].asScalarLong();
long procedureInvocations = table.getLong("PROCEDURE_INVOCATIONS");
long successfulProcedureInvocations = procedureInvocations - table.getLong("PROCEDURE_FAILURES");
/*
* There can be one extra invocation since stats are recorded when the result comes back and a schedule can be
* stopped after the procedure is called but before the result comes back
*/
assertTrue(
"Summary table has " + summaryCount + " rows. Invocation count is " + successfulProcedureInvocations
+ " for " + scheduleName,
summaryCount == successfulProcedureInvocations
|| summaryCount == successfulProcedureInvocations + 1);
long schedulerInvocations = table.getLong("SCHEDULER_INVOCATIONS");
assertTrue(
schedulerInvocations == procedureInvocations || schedulerInvocations == procedureInvocations + 1);
}
}
/*
* Test a schedule which runs on the partitions
*
* Create a simple schedule which prunes each partition down to 10 entries
*
* Test that partition failover works correctly
*/
@Test
public void partitionsSchedules() throws Exception {
Client client = getClient(0);
String tableName = getTableName(0, TableType.PARTITIONED);
String procName = getMethodName() + "_prune";
client.callProcedure("@AdHoc",
"CREATE PROCEDURE " + procName + " PARTITIONED AS DELETE FROM " + tableName
+ " ORDER BY key OFFSET 10");
AtomicReference<Exception> error = new AtomicReference<>();
Thread producer = new Thread() {
@Override
public void run() {
try {
while (true) {
insertRandomRows(client, 0, TableType.PARTITIONED, 10);
Thread.sleep(1);
}
} catch (InterruptedException e) {
return;
} catch (Exception e) {
error.set(e);
}
};
};
producer.start();
String schedule = getMethodName();
client.callProcedure("@AdHoc",
"CREATE TASK " + schedule + " ON SCHEDULE DELAY 10 MILLISECONDS PROCEDURE " + procName
+ " RUN ON PARTITIONS");
// Give everything some time to run
Thread.sleep(1000);
producer.interrupt();
producer.join();
VoltTable table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
Thread.sleep(15);
client.callProcedure("@AdHoc", "ALTER TASK " + schedule + " DISABLE;");
Thread.sleep(5);
table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("DISABLED", table.getString("STATE"));
assertTrue("Scheduler invocation count is lower than expected: " + table.getLong("SCHEDULER_INVOCATIONS"),
table.getLong("SCHEDULER_INVOCATIONS") >= 50);
assertTrue("Procedure invocation count is lower than expected: " + table.getLong("PROCEDURE_INVOCATIONS"),
table.getLong("PROCEDURE_INVOCATIONS") >= 50);
}
assertEquals(60,
client.callProcedure("@AdHoc", "SELECT COUNT(*) FROM " + tableName).getResults()[0].asScalarLong());
// Test that partition schedules fail over
getCluster(0).killSingleHost(1);
table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("DISABLED", table.getString("STATE"));
}
client.callProcedure("@AdHoc", "ALTER TASK " + schedule + " ENABLE;");
table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
try {
client.callProcedure("@AdHoc", "DROP PROCEDURE " + procName);
fail("Should not have been able to drop: " + procName);
} catch (ProcCallException e) {
String status = e.getClientResponse().getStatusString();
assertTrue(status, status.contains("Procedure does not exist: " + procName));
}
}
@Test
public void customScheduler() throws Exception {
Client client = getClient(0);
client.callProcedure("@AdHoc",
"CREATE TASK " + getMethodName() + " FROM CLASS " + CustomScheduler.class.getName()
+ " WITH (5, NULL);");
Thread.sleep(1000);
VoltTable table = getTaskStats(client);
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
assertNull(table.getString("SCHEDULER_STATUS"));
}
client.callProcedure("@AdHoc", "DROP TASK " + getMethodName());
client.callProcedure("@AdHoc",
"CREATE TASK " + getMethodName() + " FROM CLASS " + CustomScheduler.class.getName()
+ " WITH (5, 'STATUS');");
table = getTaskStats(client);
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
assertEquals("STATUS", table.getString("SCHEDULER_STATUS"));
}
}
/*
* Test creating tasks with AS USER clause
*/
@Test
public void tasksAsUser() throws Exception {
Client client = getClient(0);
String tableName = getTableName(0, TableType.PARTITIONED);
String procName = getMethodName() + "_prune";
client.callProcedure("@AdHoc",
"CREATE PROCEDURE " + procName + " PARTITIONED AS DELETE FROM " + tableName
+ " ORDER BY key OFFSET 10");
try {
client.callProcedure("@AdHoc",
"CREATE TASK " + getMethodName() + " ON SCHEDULE DELAY 5 MILLISECONDS PROCEDURE " + procName
+ " RUN ON PARTITIONS AS USER BOGUS_USER;");
fail("Should have failed to create a task with a bogus user");
} catch (ProcCallException e) {}
client.callProcedure("@AdHoc", "CREATE TASK " + getMethodName() + " ON SCHEDULE DELAY 5 MILLISECONDS PROCEDURE "
+ procName + " RUN ON PARTITIONS AS USER " + s_userName + ";");
Thread.sleep(500);
VoltTable table = getTaskStats(client);
assertEquals(6, table.getRowCount());
while (table.advanceRow()) {
assertEquals("RUNNING", table.getString("STATE"));
}
m_builder.clearUsers();
try {
getCluster(0).updateCatalog(m_builder);
fail("Should have failed to update the catalog since the user is in use");
} catch (ProcCallException e) {}
}
private static VoltTable getTaskStats(Client client)
throws NoConnectionsException, IOException, ProcCallException {
return client.callProcedure("@Statistics", "TASK", 0).getResults()[0];
}
public static class CustomScheduler implements ActionScheduler {
private int m_delayMs;
private String m_status;
public void initialize(int delayMs, String status) {
m_delayMs = delayMs;
m_status = status;
}
@Override
public DelayedAction getFirstDelayedAction() {
return getNextAction(null);
}
public DelayedAction getNextAction(ActionResult result) {
return DelayedAction.createCallback(m_delayMs, TimeUnit.MILLISECONDS, this::getNextAction)
.setStatusMessage(m_status);
}
}
}
|
TestTasksEnd2End: Do not use @AdHoc as task procedure
|
tests/frontend/org/voltdb/task/TestTasksEnd2End.java
|
TestTasksEnd2End: Do not use @AdHoc as task procedure
|
|
Java
|
lgpl-2.1
|
396ec0270bbf38362d160cec43be1be89489d2bd
| 0
|
getrailo/railo,getrailo/railo,modius/railo,JordanReiter/railo,getrailo/railo,modius/railo,JordanReiter/railo
|
package railo.runtime.dump;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpSession;
import org.apache.xerces.dom.AttributeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import railo.commons.date.TimeZoneUtil;
import railo.commons.io.res.Resource;
import railo.commons.lang.IDGenerator;
import railo.commons.lang.StringUtil;
import railo.runtime.PageContext;
import railo.runtime.converter.WDDXConverter;
import railo.runtime.exp.PageException;
import railo.runtime.op.Caster;
import railo.runtime.op.Decision;
import railo.runtime.text.xml.XMLAttributes;
import railo.runtime.text.xml.XMLCaster;
import railo.runtime.type.Array;
import railo.runtime.type.Collection;
import railo.runtime.type.ObjectWrap;
import railo.runtime.type.QueryImpl;
import railo.runtime.type.dt.DateTimeImpl;
public class DumpUtil {
public static DumpData toDumpData(Object o,PageContext pageContext, int maxlevel, DumpProperties props) {
if(maxlevel<=0) {
return new SimpleDumpData("maximal dump level reached");
}
// null
if(o == null) {
DumpTable table=new DumpTablePro("null","#ff4400","#ff954f","#000000");
table.appendRow(new DumpRow(0,new SimpleDumpData("Empty:null")));
return table;
}
if(o instanceof DumpData) {
return ((DumpData)o);
}
// Date
if(o instanceof Date) {
return new DateTimeImpl((Date) o).toDumpData(pageContext,maxlevel,props);
}
// Calendar
if(o instanceof Calendar) {
Calendar c=(Calendar)o;
SimpleDateFormat df = new SimpleDateFormat("EE, dd MMM yyyy HH:mm:ss zz",Locale.ENGLISH);
df.setTimeZone(c.getTimeZone());
DumpTable table=new DumpTablePro("date","#ffb200","#ffcc00","#263300");
table.setTitle("java.util.Calendar");
table.appendRow(1, new SimpleDumpData("Timezone"), new SimpleDumpData(TimeZoneUtil.toString(c.getTimeZone())));
table.appendRow(1, new SimpleDumpData("Time"), new SimpleDumpData(df.format(c.getTime())));
return table;
}
// StringBuffer
if(o instanceof StringBuffer) {
DumpTable dt=(DumpTable)toDumpData(o.toString(), pageContext, maxlevel, props);
if(StringUtil.isEmpty(dt.getTitle()))
dt.setTitle(Caster.toClassName(o));
return dt;
}
// String
if(o instanceof String) {
String str=(String) o;
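// Strings that look like WDDX packets are deserialized and shown both
// decoded and raw; anything else is dumped as a plain string row.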
if(str.startsWith("<wddxPacket ")) {
try {
WDDXConverter converter =new WDDXConverter(pageContext.getTimeZone(),false);
converter.setTimeZone(pageContext.getTimeZone());
Object rst = converter.deserialize(str,false);
DumpData data = toDumpData(rst, pageContext, maxlevel, props);
DumpTable table = new DumpTablePro("string","#C2AF94","#F3EFEA","#000000");
table.setTitle("WDDX");
table.appendRow(1,new SimpleDumpData("encoded"),data);
table.appendRow(1,new SimpleDumpData("raw"),new SimpleDumpData(str));
return table;
}
catch(Throwable t) {}
}
DumpTable table = new DumpTablePro("string","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("string"),new SimpleDumpData(str));
return table;
}
// Character
if(o instanceof Character) {
DumpTable table = new DumpTablePro("character","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("character"),new SimpleDumpData(o.toString()));
return table;
}
// Number
if(o instanceof Number) {
DumpTable table = new DumpTablePro("numeric","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("number"),new SimpleDumpData(Caster.toString(((Number)o).doubleValue())));
return table;
}
// Boolean
if(o instanceof Boolean) {
DumpTable table = new DumpTablePro("boolean","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("boolean"),new SimpleDumpData(((Boolean)o).booleanValue()));
return table;
}
// File
if(o instanceof File) {
DumpTable table = new DumpTablePro("file","#979EAA","#DEE9FB","#000000");
table.appendRow(1,new SimpleDumpData("File"),new SimpleDumpData(o.toString()));
return table;
}
// Resource
if(o instanceof Resource) {
DumpTable table = new DumpTablePro("resource","#979EAA","#DEE9FB","#000000");
table.appendRow(1,new SimpleDumpData("Resource"),new SimpleDumpData(o.toString()));
return table;
}
// byte[]
if(o instanceof byte[]) {
byte[] bytes=(byte[]) o;
DumpTable table = new DumpTablePro("array","#ffb200","#ffcc00","#000000");
table.setTitle("Native Array ("+Caster.toClassName(o)+")");
StringBuffer sb=new StringBuffer();
for(int i=0;i<bytes.length;i++) {
if(i!=0)sb.append("-");
sb.append(bytes[i]);
if(i==1000) {
sb.append(" [truncated] ");
break;
}
}
table.appendRow(0,new SimpleDumpData(sb.toString()));
return table;
}
// Collection.Key
if(o instanceof Collection.Key) {
Collection.Key key=(Collection.Key) o;
DumpTable table = new DumpTablePro("string","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("Collection.Key"),new SimpleDumpData(key.getString()));
return table;
}
String id=""+IDGenerator.intId();
String refid=ThreadLocalDump.get(o);
if(refid!=null) {
DumpTablePro table = new DumpTablePro("ref","#eeeeee","#cccccc","#000000");
table.appendRow(1,new SimpleDumpData("Reference"),new SimpleDumpData(refid));
table.setRef(refid);
return setId(id,table);
}
ThreadLocalDump.set(o,id);
try{
// Printable
if(o instanceof Dumpable) {
return setId(id,((Dumpable)o).toDumpData(pageContext,maxlevel,props));
}
// Map
if(o instanceof Map) {
Map map=(Map) o;
Iterator it=map.keySet().iterator();
DumpTable table = new DumpTablePro("struct","#ffb200","#ffcc00","#000000");
table.setTitle("Map ("+Caster.toClassName(o)+")");
while(it.hasNext()) {
Object next=it.next();
table.appendRow(1,toDumpData(next,pageContext,maxlevel,props),toDumpData(map.get(next),pageContext,maxlevel,props));
}
return setId(id,table);
}
// List
if(o instanceof List) {
List list=(List) o;
ListIterator it=list.listIterator();
DumpTable table = new DumpTablePro("array","#ffb200","#ffcc00","#000000");
table.setTitle("Array (List)");
while(it.hasNext()) {
table.appendRow(1,new SimpleDumpData(it.nextIndex()+1),toDumpData(it.next(),pageContext,maxlevel,props));
}
return setId(id,table);
}
// Resultset
if(o instanceof ResultSet) {
try {
DumpData dd = new QueryImpl((ResultSet)o,"query").toDumpData(pageContext,maxlevel,props);
if(dd instanceof DumpTable)
((DumpTable)dd).setTitle(Caster.toClassName(o));
return setId(id,dd);
}
catch (PageException e) {
}
}
// Enumeration
if(o instanceof Enumeration) {
Enumeration e=(Enumeration)o;
DumpTable table = new DumpTablePro("enumeration","#ffb200","#ffcc00","#000000");
table.setTitle("Enumeration");
while(e.hasMoreElements()) {
table.appendRow(0,toDumpData(e.nextElement(),pageContext,maxlevel,props));
}
return setId(id,table);
}
// Object[]
if(Decision.isNativeArray(o)) {
Array arr;
try {
arr = Caster.toArray(o);
DumpTable htmlBox = new DumpTablePro("array","#ffb200","#ffcc00","#000000");
htmlBox.setTitle("Native Array ("+Caster.toClassName(o)+")");
int length=arr.size();
for(int i=1;i<=length;i++) {
Object ox=null;
try {
ox = arr.getE(i);
} catch (Exception e) {}
htmlBox.appendRow(1,new SimpleDumpData(i),toDumpData(ox,pageContext,maxlevel,props));
}
return setId(id,htmlBox);
}
catch (PageException e) {
return setId(id,new SimpleDumpData(""));
}
}
// Node
if(o instanceof Node) {
return setId(id,XMLCaster.toDumpData((Node)o, pageContext,maxlevel,props));
}
// ObjectWrap
if(o instanceof ObjectWrap) {
maxlevel++;
return setId(id,toDumpData(((ObjectWrap)o).getEmbededObject(null), pageContext,maxlevel,props));
}
// NodeList
if(o instanceof NodeList) {
NodeList list=(NodeList)o;
int len=list.getLength();
DumpTable table = new DumpTablePro("xml","#C2AF94","#F3EFEA","#000000");
for(int i=0;i<len;i++) {
table.appendRow(1,new SimpleDumpData(i),toDumpData(list.item(i),pageContext,maxlevel,props));
}
return setId(id,table);
}
// AttributeMap
if(o instanceof AttributeMap) {
return setId(id,new XMLAttributes((AttributeMap)o,false).toDumpData(pageContext, maxlevel,props));
}
// HttpSession
if(o instanceof HttpSession) {
HttpSession hs = (HttpSession)o;
Enumeration e = hs.getAttributeNames();
DumpTable htmlBox = new DumpTablePro("httpsession","#5965e4","#9999ff","#000000");
htmlBox.setTitle("HttpSession");
while(e.hasMoreElements()) {
String key=e.nextElement().toString();
htmlBox.appendRow(1,new SimpleDumpData(key),toDumpData(hs.getAttribute(key), pageContext,maxlevel,props));
}
return setId(id,htmlBox);
}
// reflect
//else {
DumpTable table = new DumpTablePro(o.getClass().getName(),"#90776E","#B2A49B","#000000");
Class clazz=o.getClass();
if(o instanceof Class) clazz=(Class) o;
String fullClassName=clazz.getName();
int pos=fullClassName.lastIndexOf('.');
String className=pos==-1?fullClassName:fullClassName.substring(pos+1);
table.setTitle(className);
table.appendRow(1,new SimpleDumpData("class"),new SimpleDumpData(fullClassName));
// Fields
Field[] fields=clazz.getFields();
DumpTable fieldDump = new DumpTable("#90776E","#B2A49B","#000000");
fieldDump.appendRow(7,new SimpleDumpData("name"),new SimpleDumpData("pattern"),new SimpleDumpData("value"));
for(int i=0;i<fields.length;i++) {
Field field = fields[i];
DumpData value;
try {//print.out(o+":"+maxlevel);
value=new SimpleDumpData(Caster.toString(field.get(o), ""));
}
catch (Exception e) {
value=new SimpleDumpData("");
}
fieldDump.appendRow(0,new SimpleDumpData(field.getName()),new SimpleDumpData(field.toString()),value);
}
if(fields.length>0)table.appendRow(1,new SimpleDumpData("fields"),fieldDump);
// Methods
StringBuffer objMethods=new StringBuffer();
Method[] methods=clazz.getMethods();
DumpTable methDump = new DumpTable("#90776E","#B2A49B","#000000");
methDump.appendRow(7,new SimpleDumpData("return"),new SimpleDumpData("interface"),new SimpleDumpData("exceptions"));
for(int i=0;i<methods.length;i++) {
Method method = methods[i];
if(Object.class==method.getDeclaringClass()) {
if(objMethods.length()>0)objMethods.append(", ");
objMethods.append(method.getName());
continue;
}
// exceptions
StringBuffer sbExp=new StringBuffer();
Class[] exceptions = method.getExceptionTypes();
for(int p=0;p<exceptions.length;p++){
if(p>0)sbExp.append("\n");
sbExp.append(Caster.toClassName(exceptions[p]));
}
// parameters
StringBuffer sbParams=new StringBuffer(method.getName());
sbParams.append('(');
Class[] parameters = method.getParameterTypes();
for(int p=0;p<parameters.length;p++){
if(p>0)sbParams.append(", ");
sbParams.append(Caster.toClassName(parameters[p]));
}
sbParams.append(')');
methDump.appendRow(0,
new SimpleDumpData(Caster.toClassName(method.getReturnType())),
new SimpleDumpData(sbParams.toString()),
new SimpleDumpData(sbExp.toString())
);
}
if(methods.length>0)table.appendRow(1,new SimpleDumpData("methods"),methDump);
DumpTable inherited = new DumpTable("#90776E","#B2A49B","#000000");
inherited.appendRow(7,new SimpleDumpData("Methods inherited from java.lang.Object"));
inherited.appendRow(0,new SimpleDumpData(objMethods.toString()));
table.appendRow(1,new SimpleDumpData(""),inherited);
return setId(id,table);
//}
}
finally{
ThreadLocalDump.remove(o);
}
}
private static DumpData setId(String id, DumpData data) {
if(data instanceof DumpTablePro) {
((DumpTablePro)data).setId(id);
}
// TODO Auto-generated method stub
return data;
}
public static boolean keyValid(DumpProperties props,int level, String key) {
if(props.getMaxlevel()-level>1) return true;
// show
Set set = props.getShow();
if(set!=null && !set.contains(StringUtil.toLowerCase(key)))
return false;
// hide
set = props.getHide();
if(set!=null && set.contains(StringUtil.toLowerCase(key)))
return false;
return true;
}
public static boolean keyValid(DumpProperties props,int level, Collection.Key key) {
if(props.getMaxlevel()-level>1) return true;
// show
Set set = props.getShow();
if(set!=null && !set.contains(key.getLowerString()))
return false;
// hide
set = props.getHide();
if(set!=null && set.contains(key.getLowerString()))
return false;
return true;
}
public static DumpProperties toDumpProperties() {
return DumpProperties.DEFAULT;
}
}
|
railo-java/railo-core/src/railo/runtime/dump/DumpUtil.java
|
package railo.runtime.dump;
import java.io.File;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.sql.ResultSet;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpSession;
import org.apache.xerces.dom.AttributeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import railo.commons.date.TimeZoneUtil;
import railo.commons.io.res.Resource;
import railo.commons.lang.IDGenerator;
import railo.commons.lang.StringUtil;
import railo.runtime.PageContext;
import railo.runtime.converter.WDDXConverter;
import railo.runtime.exp.PageException;
import railo.runtime.op.Caster;
import railo.runtime.op.Decision;
import railo.runtime.text.xml.XMLAttributes;
import railo.runtime.text.xml.XMLCaster;
import railo.runtime.type.Array;
import railo.runtime.type.Collection;
import railo.runtime.type.ObjectWrap;
import railo.runtime.type.QueryImpl;
import railo.runtime.type.dt.DateTimeImpl;
public class DumpUtil {
public static DumpData toDumpData(Object o,PageContext pageContext, int maxlevel, DumpProperties props) {
if(maxlevel<=0) {
return new SimpleDumpData("maximal dump level reached");
}
// null
if(o == null) {
DumpTable table=new DumpTablePro("null","#ff4400","#ff954f","#000000");
table.appendRow(new DumpRow(0,new SimpleDumpData("Empty:null")));
return table;
}
if(o instanceof DumpData) {
return ((DumpData)o);
}
// Date
if(o instanceof Date) {
return new DateTimeImpl((Date) o).toDumpData(pageContext,maxlevel,props);
}
// Calendar
if(o instanceof Calendar) {
Calendar c=(Calendar)o;
SimpleDateFormat df = new SimpleDateFormat("EE, dd MMM yyyy HH:mm:ss zz",Locale.ENGLISH);
df.setTimeZone(c.getTimeZone());
DumpTable table=new DumpTablePro("date","#ffb200","#ffcc00","#263300");
table.setTitle("java.util.Calendar");
table.appendRow(1, new SimpleDumpData("Timezone"), new SimpleDumpData(TimeZoneUtil.toString(c.getTimeZone())));
table.appendRow(1, new SimpleDumpData("Time"), new SimpleDumpData(df.format(c.getTime())));
return table;
}
// StringBuffer
if(o instanceof StringBuffer) {
DumpTable dt=(DumpTable)toDumpData(o.toString(), pageContext, maxlevel, props);
if(StringUtil.isEmpty(dt.getTitle()))
dt.setTitle(Caster.toClassName(o));
return dt;
}
// String
if(o instanceof String) {
String str=(String) o;
if(str.startsWith("<wddxPacket ")) {
try {
WDDXConverter converter =new WDDXConverter(pageContext.getTimeZone(),false);
converter.setTimeZone(pageContext.getTimeZone());
Object rst = converter.deserialize(str,false);
DumpData data = toDumpData(rst, pageContext, maxlevel, props);
DumpTable table = new DumpTablePro("string","#C2AF94","#F3EFEA","#000000");
table.setTitle("WDDX");
table.appendRow(1,new SimpleDumpData("encoded"),data);
table.appendRow(1,new SimpleDumpData("raw"),new SimpleDumpData(str));
return table;
}
catch(Throwable t) {}
}
DumpTable table = new DumpTablePro("string","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("string"),new SimpleDumpData(str));
return table;
}
// Character
if(o instanceof Character) {
DumpTable table = new DumpTablePro("character","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("character"),new SimpleDumpData(o.toString()));
return table;
}
// Number
if(o instanceof Number) {
DumpTable table = new DumpTablePro("numeric","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("number"),new SimpleDumpData(Caster.toString(((Number)o).doubleValue())));
return table;
}
// Boolean
if(o instanceof Boolean) {
DumpTable table = new DumpTablePro("boolean","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("boolean"),new SimpleDumpData(((Boolean)o).booleanValue()));
return table;
}
// File
if(o instanceof File) {
DumpTable table = new DumpTablePro("file","#979EAA","#DEE9FB","#000000");
table.appendRow(1,new SimpleDumpData("File"),new SimpleDumpData(o.toString()));
return table;
}
// Resource
if(o instanceof Resource) {
DumpTable table = new DumpTablePro("resource","#979EAA","#DEE9FB","#000000");
table.appendRow(1,new SimpleDumpData("Resource"),new SimpleDumpData(o.toString()));
return table;
}
// byte[]
if(o instanceof byte[]) {
byte[] bytes=(byte[]) o;
DumpTable table = new DumpTablePro("array","#ffb200","#ffcc00","#000000");
table.setTitle("Native Array");
StringBuffer sb=new StringBuffer();
for(int i=0;i<bytes.length;i++) {
if(i!=0)sb.append("-");
sb.append(bytes[i]);
if(i==1000) {
sb.append(" [truncated] ");
break;
}
}
table.appendRow(0,new SimpleDumpData(sb.toString()));
return table;
}
// Collection.Key
if(o instanceof Collection.Key) {
Collection.Key key=(Collection.Key) o;
DumpTable table = new DumpTablePro("string","#ff4400","#ff954f","#000000");
table.appendRow(1,new SimpleDumpData("Collection.Key"),new SimpleDumpData(key.getString()));
return table;
}
String id=""+IDGenerator.intId();
String refid=ThreadLocalDump.get(o);
if(refid!=null) {
DumpTablePro table = new DumpTablePro("ref","#eeeeee","#cccccc","#000000");
table.appendRow(1,new SimpleDumpData("Reference"),new SimpleDumpData(refid));
table.setRef(refid);
return setId(id,table);
}
ThreadLocalDump.set(o,id);
try{
// Printable
if(o instanceof Dumpable) {
return setId(id,((Dumpable)o).toDumpData(pageContext,maxlevel,props));
}
// Map
if(o instanceof Map) {
Map map=(Map) o;
Iterator it=map.keySet().iterator();
DumpTable table = new DumpTablePro("struct","#ffb200","#ffcc00","#000000");
table.setTitle("Map ("+Caster.toClassName(o)+")");
while(it.hasNext()) {
Object next=it.next();
table.appendRow(1,toDumpData(next,pageContext,maxlevel,props),toDumpData(map.get(next),pageContext,maxlevel,props));
}
return setId(id,table);
}
// List
if(o instanceof List) {
List list=(List) o;
ListIterator it=list.listIterator();
DumpTable table = new DumpTablePro("array","#ffb200","#ffcc00","#000000");
table.setTitle("Array (List)");
while(it.hasNext()) {
table.appendRow(1,new SimpleDumpData(it.nextIndex()+1),toDumpData(it.next(),pageContext,maxlevel,props));
}
return setId(id,table);
}
// Resultset
if(o instanceof ResultSet) {
try {
DumpData dd = new QueryImpl((ResultSet)o,"query").toDumpData(pageContext,maxlevel,props);
if(dd instanceof DumpTable)
((DumpTable)dd).setTitle(Caster.toClassName(o));
return setId(id,dd);
}
catch (PageException e) {
}
}
// Enumeration
if(o instanceof Enumeration) {
Enumeration e=(Enumeration)o;
DumpTable table = new DumpTablePro("enumeration","#ffb200","#ffcc00","#000000");
table.setTitle("Enumeration");
while(e.hasMoreElements()) {
table.appendRow(0,toDumpData(e.nextElement(),pageContext,maxlevel,props));
}
return setId(id,table);
}
// Object[]
if(Decision.isNativeArray(o)) {
Array arr;
try {
arr = Caster.toArray(o);
DumpTable htmlBox = new DumpTablePro("array","#ffb200","#ffcc00","#000000");
htmlBox.setTitle("Native Array");
int length=arr.size();
for(int i=1;i<=length;i++) {
Object ox=null;
try {
ox = arr.getE(i);
} catch (Exception e) {}
htmlBox.appendRow(1,new SimpleDumpData(i),toDumpData(ox,pageContext,maxlevel,props));
}
return setId(id,htmlBox);
}
catch (PageException e) {
return setId(id,new SimpleDumpData(""));
}
}
// Node
if(o instanceof Node) {
return setId(id,XMLCaster.toDumpData((Node)o, pageContext,maxlevel,props));
}
// ObjectWrap
if(o instanceof ObjectWrap) {
maxlevel++;
return setId(id,toDumpData(((ObjectWrap)o).getEmbededObject(null), pageContext,maxlevel,props));
}
// NodeList
if(o instanceof NodeList) {
NodeList list=(NodeList)o;
int len=list.getLength();
DumpTable table = new DumpTablePro("xml","#C2AF94","#F3EFEA","#000000");
for(int i=0;i<len;i++) {
table.appendRow(1,new SimpleDumpData(i),toDumpData(list.item(i),pageContext,maxlevel,props));
}
return setId(id,table);
}
// AttributeMap
if(o instanceof AttributeMap) {
return setId(id,new XMLAttributes((AttributeMap)o,false).toDumpData(pageContext, maxlevel,props));
}
// HttpSession
if(o instanceof HttpSession) {
HttpSession hs = (HttpSession)o;
Enumeration e = hs.getAttributeNames();
DumpTable htmlBox = new DumpTablePro("httpsession","#5965e4","#9999ff","#000000");
htmlBox.setTitle("HttpSession");
while(e.hasMoreElements()) {
String key=e.nextElement().toString();
htmlBox.appendRow(1,new SimpleDumpData(key),toDumpData(hs.getAttribute(key), pageContext,maxlevel,props));
}
return setId(id,htmlBox);
}
// reflect
//else {
DumpTable table = new DumpTablePro(o.getClass().getName(),"#90776E","#B2A49B","#000000");
Class clazz=o.getClass();
if(o instanceof Class) clazz=(Class) o;
String fullClassName=clazz.getName();
int pos=fullClassName.lastIndexOf('.');
String className=pos==-1?fullClassName:fullClassName.substring(pos+1);
table.setTitle(className);
table.appendRow(1,new SimpleDumpData("class"),new SimpleDumpData(fullClassName));
// Fields
Field[] fields=clazz.getFields();
DumpTable fieldDump = new DumpTable("#90776E","#B2A49B","#000000");
fieldDump.appendRow(7,new SimpleDumpData("name"),new SimpleDumpData("pattern"),new SimpleDumpData("value"));
for(int i=0;i<fields.length;i++) {
Field field = fields[i];
DumpData value;
try {//print.out(o+":"+maxlevel);
value=new SimpleDumpData(Caster.toString(field.get(o), ""));
}
catch (Exception e) {
value=new SimpleDumpData("");
}
fieldDump.appendRow(0,new SimpleDumpData(field.getName()),new SimpleDumpData(field.toString()),value);
}
if(fields.length>0)table.appendRow(1,new SimpleDumpData("fields"),fieldDump);
// Methods
StringBuffer objMethods=new StringBuffer();
Method[] methods=clazz.getMethods();
DumpTable methDump = new DumpTable("#90776E","#B2A49B","#000000");
methDump.appendRow(7,new SimpleDumpData("return"),new SimpleDumpData("interface"),new SimpleDumpData("exceptions"));
for(int i=0;i<methods.length;i++) {
Method method = methods[i];
if(Object.class==method.getDeclaringClass()) {
if(objMethods.length()>0)objMethods.append(", ");
objMethods.append(method.getName());
continue;
}
// exceptions
StringBuffer sbExp=new StringBuffer();
Class[] exceptions = method.getExceptionTypes();
for(int p=0;p<exceptions.length;p++){
if(p>0)sbExp.append("\n");
sbExp.append(Caster.toClassName(exceptions[p]));
}
// parameters
StringBuffer sbParams=new StringBuffer(method.getName());
sbParams.append('(');
Class[] parameters = method.getParameterTypes();
for(int p=0;p<parameters.length;p++){
if(p>0)sbParams.append(", ");
sbParams.append(Caster.toClassName(parameters[p]));
}
sbParams.append(')');
methDump.appendRow(0,
new SimpleDumpData(Caster.toClassName(method.getReturnType())),
new SimpleDumpData(sbParams.toString()),
new SimpleDumpData(sbExp.toString())
);
}
if(methods.length>0)table.appendRow(1,new SimpleDumpData("methods"),methDump);
DumpTable inherited = new DumpTable("#90776E","#B2A49B","#000000");
inherited.appendRow(7,new SimpleDumpData("Methods inherited from java.lang.Object"));
inherited.appendRow(0,new SimpleDumpData(objMethods.toString()));
table.appendRow(1,new SimpleDumpData(""),inherited);
return setId(id,table);
//}
}
finally{
ThreadLocalDump.remove(o);
}
}
private static DumpData setId(String id, DumpData data) {
if(data instanceof DumpTablePro) {
((DumpTablePro)data).setId(id);
}
// TODO Auto-generated method stub
return data;
}
public static boolean keyValid(DumpProperties props,int level, String key) {
if(props.getMaxlevel()-level>1) return true;
// show
Set set = props.getShow();
if(set!=null && !set.contains(StringUtil.toLowerCase(key)))
return false;
// hide
set = props.getHide();
if(set!=null && set.contains(StringUtil.toLowerCase(key)))
return false;
return true;
}
public static boolean keyValid(DumpProperties props,int level, Collection.Key key) {
if(props.getMaxlevel()-level>1) return true;
// show
Set set = props.getShow();
if(set!=null && !set.contains(key.getLowerString()))
return false;
// hide
set = props.getHide();
if(set!=null && set.contains(key.getLowerString()))
return false;
return true;
}
public static DumpProperties toDumpProperties() {
return DumpProperties.DEFAULT;
}
}
|
improved title for native array dump
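A standalone sketch of the behavior this message refers to: the dump title for a native array now includes the runtime class name, so byte[], int[] and Object[] dumps can be told apart. The class below is hypothetical and uses plain Class#getSimpleName rather than Railo's Caster.toClassName, whose exact formatting may differ; it only illustrates the idea.

public class NativeArrayTitleExample {
    // Hypothetical helper, not part of Railo: builds a dump-style title that,
    // like the patched DumpUtil, appends the array's class name.
    static String titleFor(Object nativeArray) {
        return "Native Array (" + nativeArray.getClass().getSimpleName() + ")";
    }

    public static void main(String[] args) {
        System.out.println(titleFor(new byte[] {1, 2, 3}));    // Native Array (byte[])
        System.out.println(titleFor(new int[] {4, 5}));        // Native Array (int[])
        System.out.println(titleFor(new String[] {"a", "b"})); // Native Array (String[])
    }
}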
|
railo-java/railo-core/src/railo/runtime/dump/DumpUtil.java
|
improved title for native array dump
|
|
Java
|
apache-2.0
|
5055f0195699bb5139761bab35be05d6c0b525f3
| 0
|
hekonsek/fabric8,rajdavies/fabric8,migue/fabric8,punkhorn/fabric8,jimmidyson/fabric8,jonathanchristison/fabric8,sobkowiak/fabric8,jonathanchristison/fabric8,dhirajsb/fabric8,rhuss/fabric8,rnc/fabric8,gashcrumb/fabric8,KurtStam/fabric8,migue/fabric8,KurtStam/fabric8,sobkowiak/fabric8,avano/fabric8,sobkowiak/fabric8,sobkowiak/fabric8,dhirajsb/fabric8,migue/fabric8,gashcrumb/fabric8,hekonsek/fabric8,hekonsek/fabric8,EricWittmann/fabric8,mwringe/fabric8,jonathanchristison/fabric8,avano/fabric8,avano/fabric8,jimmidyson/fabric8,rhuss/fabric8,EricWittmann/fabric8,rnc/fabric8,christian-posta/fabric8,punkhorn/fabric8,christian-posta/fabric8,jonathanchristison/fabric8,rhuss/fabric8,aslakknutsen/fabric8,jludvice/fabric8,hekonsek/fabric8,aslakknutsen/fabric8,punkhorn/fabric8,chirino/fabric8,punkhorn/fabric8,chirino/fabric8v2,PhilHardwick/fabric8,chirino/fabric8v2,zmhassan/fabric8,rhuss/fabric8,PhilHardwick/fabric8,migue/fabric8,EricWittmann/fabric8,jludvice/fabric8,KurtStam/fabric8,dhirajsb/fabric8,rnc/fabric8,zmhassan/fabric8,mwringe/fabric8,chirino/fabric8,rajdavies/fabric8,jimmidyson/fabric8,janstey/fabric8,rnc/fabric8,mwringe/fabric8,janstey/fabric8,jimmidyson/fabric8,avano/fabric8,janstey/fabric8,chirino/fabric8,gashcrumb/fabric8,rnc/fabric8,zmhassan/fabric8,rajdavies/fabric8,aslakknutsen/fabric8,PhilHardwick/fabric8,jludvice/fabric8,rajdavies/fabric8,EricWittmann/fabric8,chirino/fabric8v2,christian-posta/fabric8,hekonsek/fabric8,christian-posta/fabric8,chirino/fabric8v2,mwringe/fabric8,jludvice/fabric8,gashcrumb/fabric8,KurtStam/fabric8,jimmidyson/fabric8,PhilHardwick/fabric8,zmhassan/fabric8,chirino/fabric8,dhirajsb/fabric8
|
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.agent.utils;
import java.io.File;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URI;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import io.fabric8.agent.download.DownloadFuture;
import io.fabric8.agent.download.DownloadManager;
import io.fabric8.agent.download.FutureListener;
import io.fabric8.agent.mvn.Parser;
import io.fabric8.api.FabricService;
import io.fabric8.api.Profile;
import io.fabric8.common.util.MultiException;
import io.fabric8.common.util.Strings;
import io.fabric8.service.VersionPropertyPointerResolver;
import io.fabric8.utils.features.FeatureUtils;
import org.apache.karaf.features.BundleInfo;
import org.apache.karaf.features.Feature;
import org.apache.karaf.features.Repository;
import org.apache.karaf.features.internal.FeatureValidationUtil;
import org.apache.karaf.features.internal.RepositoryImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.fabric8.utils.PatchUtils.extractUrl;
public class AgentUtils {
private static final Logger LOGGER = LoggerFactory.getLogger(AgentUtils.class);
public static final String FAB_PROTOCOL = "fab:";
public static final String REQ_PROTOCOL = "req:";
/**
* Returns the location and parser map (i.e. the location and the parsed maven coordinates and artifact locations) of each bundle and feature
* of the given profile
*/
public static Map<String, Parser> getProfileArtifacts(FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
List<String> bundles = profile.getBundles();
Set<Feature> features = new HashSet<Feature>();
addFeatures(features, fabricService, downloadManager, profile);
return getProfileArtifacts(fabricService, profile, bundles, features);
}
/**
* Returns the location and parser map (i.e. the location and the parsed maven coordinates and artifact locations) of each bundle and feature
*/
public static Map<String, Parser> getProfileArtifacts(FabricService fabricService, Profile profile, Iterable<String> bundles, Iterable<Feature> features) {
Set<String> locations = new HashSet<String>();
for (Feature feature : features) {
List<BundleInfo> bundleList = feature.getBundles();
if (bundleList == null) {
LOGGER.warn("No bundles for feature " + feature);
} else {
for (BundleInfo bundle : bundleList) {
locations.add(bundle.getLocation());
}
}
}
for (String bundle : bundles) {
locations.add(bundle);
}
Map<String,Parser> artifacts = new HashMap<String, Parser>();
for (String location : locations) {
try {
if (location.contains("$")) {
location = VersionPropertyPointerResolver.replaceVersions(fabricService, profile.getOverlay().getConfigurations(), location);
}
if (location.startsWith("mvn:") || location.contains(":mvn:")) {
Parser parser = Parser.parsePathWithSchemePrefix(location);
artifacts.put(location, parser);
}
} catch (MalformedURLException e) {
LOGGER.error("Failed to parse bundle URL: " + location + ". " + e, e);
}
}
return artifacts;
}
public static void addRepository(DownloadManager manager, Map<URI, Repository> repositories, URI uri) throws Exception {
if (!repositories.containsKey(uri)) {
File file = manager.download(uri.toString()).await().getFile();
FeatureValidationUtil.validate(file.toURI());
//We use the file URI instead of the Maven URL because we want to make sure that the repository can load.
//If we used the Maven URI instead, we would have to make sure that the download location is added to
//the ops4j pax url configuration. Using the file URI is safer and less prone to misconfiguration.
RepositoryImpl repo = new RepositoryImpl(file.toURI());
repositories.put(uri, repo);
repo.load();
for (URI ref : repo.getRepositories()) {
addRepository(manager, repositories, ref);
}
}
}
/**
 * Extracts the {@link java.net.URI}/{@link org.apache.karaf.features.Repository} map from the profile.
 *
 * @param fabricService the fabric service used to resolve version expressions in repository URLs
 * @param downloadManager the download manager used to fetch the feature repositories
 * @param profile the profile whose repository URLs are resolved
 * @return the map of repository URI to loaded {@link org.apache.karaf.features.Repository}
 * @throws Exception if the profile repositories cannot be resolved
 */
protected static Map<URI, Repository> getRepositories(FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
Map<URI, Repository> repositories = new HashMap<URI, Repository>();
for (String repositoryUrl : profile.getRepositories()) {
if (Strings.isNotBlank(repositoryUrl)) {
try {
// lets replace any version expressions
String replacedUrl = repositoryUrl;
if (repositoryUrl.contains("$")) {
replacedUrl = VersionPropertyPointerResolver.replaceVersions(fabricService, profile.getConfigurations(), repositoryUrl);
}
URI repoUri = new URI(replacedUrl);
addRepository(downloadManager, repositories, repoUri);
} catch (Exception e) {
LOGGER.warn("Failed to add repository " + repositoryUrl + " for profile " + profile.getId() + ". " + e);
}
}
}
return repositories;
}
/**
 * Adds the features of the given profile, including their dependencies, to the given set.
 *
 * @param features the set the resolved features are added to
 * @param fabricService the fabric service used to resolve the profile repositories
 * @param downloadManager the download manager used to fetch the feature repositories
 * @param profile the profile whose features are resolved
 * @throws Exception if the profile repositories cannot be loaded
 */
public static void addFeatures(Set<Feature> features, FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
List<String> featureNames = profile.getFeatures();
Map<URI, Repository> repositories = getRepositories(fabricService, downloadManager, profile);
for (String featureName : featureNames) {
Feature feature = FeatureUtils.search(featureName, repositories.values());
if (feature == null) {
LOGGER.warn("Could not find feature " + featureName
+ " for profile " + profile.getId()
+ " in repositories " + repositories.keySet());
} else {
features.addAll(expandFeature(feature, repositories));
}
}
}
public static Set<Feature> expandFeature(Feature feature, Map<URI, Repository> repositories) {
Set<Feature> features = new HashSet<Feature>();
for (Feature f : feature.getDependencies()) {
Feature loaded = FeatureUtils.search(f.getName(), repositories.values());
features.addAll(expandFeature(loaded, repositories));
}
features.add(feature);
return features;
}
public static Map<String, Repository> loadRepositories(DownloadManager manager, Set<String> uris) throws Exception {
RepositoryDownloader downloader = new RepositoryDownloader(manager);
downloader.download(uris);
return downloader.await();
}
/**
* Downloads all the bundles and features for the given profile
*/
public static Map<String, File> downloadProfileArtifacts(FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
List<String> bundles = profile.getBundles();
Set<Feature> features = new HashSet<Feature>();
addFeatures(features, fabricService, downloadManager, profile);
return downloadBundles(downloadManager, features, bundles, Collections.EMPTY_SET);
}
public static Map<String, File> downloadBundles(DownloadManager manager, Iterable<Feature> features, Iterable<String> bundles, Set<String> overrides) throws Exception {
Set<String> locations = new HashSet<String>();
for (Feature feature : features) {
for (BundleInfo bundle : feature.getBundles()) {
locations.add(bundle.getLocation());
}
}
for (String bundle : bundles) {
locations.add(bundle);
}
for (String override : overrides) {
locations.add(extractUrl(override));
}
return downloadLocations(manager, locations);
}
public static Map<String, File> downloadLocations(DownloadManager manager, Collection<String> locations) throws MalformedURLException, InterruptedException, MultiException {
FileDownloader downloader = new FileDownloader(manager);
downloader.download(locations);
return downloader.await();
}
public static void addMavenProxies(Dictionary props, FabricService fabricService) {
try {
if (fabricService != null) {
StringBuilder sb = new StringBuilder();
for (URI uri : fabricService.getMavenRepoURIs()) {
String mavenRepo = uri.toString();
if (!mavenRepo.endsWith("/")) {
mavenRepo += "/";
}
if (sb.length() > 0) {
sb.append(",");
}
sb.append(mavenRepo);
sb.append("@snapshots");
}
String existingRepos = (String) props.get("org.ops4j.pax.url.mvn.repositories");
if (existingRepos != null) {
if (sb.length() > 0) {
sb.append(",");
}
sb.append(existingRepos);
}
props.put("org.ops4j.pax.url.mvn.repositories", sb.toString());
}
} catch (Exception e) {
LOGGER.warn("Unable to retrieve maven proxy urls: " + e.getMessage());
LOGGER.debug("Unable to retrieve maven proxy urls: " + e.getMessage(), e);
}
}
public interface DownloadCallback {
public void downloaded(File file) throws Exception;
}
public static abstract class ArtifactDownloader<T> {
protected final DownloadManager manager;
protected final Object lock = new Object();
protected final ConcurrentMap<String, DownloadFuture> futures = new ConcurrentHashMap<String, DownloadFuture>();
protected final ConcurrentMap<String, T> artifacts = new ConcurrentHashMap<String, T>();
protected final List<Throwable> errors = new CopyOnWriteArrayList<Throwable>();
protected final AtomicInteger pendings = new AtomicInteger();
public ArtifactDownloader(DownloadManager manager) {
this.manager = manager;
}
public void download(String uri, final DownloadCallback callback) throws MalformedURLException {
synchronized (lock) {
DownloadFuture future = futures.get(uri);
if (future == null) {
pendings.incrementAndGet();
future = manager.download(uri);
future.addListener(new FutureListener<DownloadFuture>() {
@Override
public void operationComplete(DownloadFuture future) {
onDownloaded(future, callback);
}
});
futures.put(uri, future);
}
}
}
public DownloadFuture download(String uri) throws MalformedURLException {
synchronized (lock) {
DownloadFuture future = futures.get(uri);
if (future == null) {
pendings.incrementAndGet();
future = manager.download(uri);
future.addListener(new FutureListener<DownloadFuture>() {
@Override
public void operationComplete(DownloadFuture future) {
onDownloaded(future, null);
}
});
futures.put(uri, future);
}
return future;
}
}
protected void onDownloaded(DownloadFuture future, DownloadCallback callback) {
synchronized (lock) {
try {
String url = future.getUrl();
File file = future.getFile();
if (file != null) {
T t = getArtifact(url, file);
artifacts.put(url, t);
if (callback != null) {
callback.downloaded(file);
}
}
} catch (Throwable t) {
errors.add(t);
} finally {
pendings.decrementAndGet();
lock.notifyAll();
}
}
}
protected abstract T getArtifact(String uri, File file) throws Exception;
public void download(Iterable<String> uris) throws MalformedURLException {
for (String uri : uris) {
download(uri);
}
}
public Map<String, T> await() throws InterruptedException, MultiException {
synchronized (lock) {
while (pendings.get() > 0) {
lock.wait();
}
if (!errors.isEmpty()) {
StringWriter sw = new StringWriter();
int nr = 1;
int pad = Integer.toString(errors.size()).length();
for (Throwable t : errors) {
sw.append(String.format("%n\t%0" + pad + "d: %s", nr++, t.getMessage()));
}
LOGGER.error("Summary of errors while downloading artifacts:" + sw.toString());
throw new MultiException(String.format("Error%s while downloading artifacts:%s", errors.size() == 1 ? "" : "s", sw.toString()), errors);
}
return artifacts;
}
}
}
public static class RepositoryDownloader extends ArtifactDownloader<Repository> {
public RepositoryDownloader(DownloadManager manager) {
super(manager);
}
@Override
protected Repository getArtifact(String uri, File file) throws Exception {
FeatureValidationUtil.validate(file.toURI());
//We use the file URI instead of the Maven URL because we want to make sure that the repository can load.
//If we used the Maven URI instead, we would have to make sure that the download location is added to
//the ops4j pax url configuration. Using the file URI is safer and less prone to misconfiguration.
RepositoryImpl repo = new RepositoryImpl(file.toURI());
repo.load();
for (URI ref : repo.getRepositories()) {
download(ref.toString());
}
return repo;
}
}
public static class FileDownloader extends ArtifactDownloader<File> {
public FileDownloader(DownloadManager manager) {
super(manager);
}
@Override
protected File getArtifact(String uri, File file) throws Exception {
return file;
}
}
}
|
fabric/fabric-agent/src/main/java/io/fabric8/agent/utils/AgentUtils.java
|
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.agent.utils;
import java.io.File;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URI;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicInteger;
import io.fabric8.agent.download.DownloadFuture;
import io.fabric8.agent.download.DownloadManager;
import io.fabric8.agent.download.FutureListener;
import io.fabric8.agent.mvn.Parser;
import io.fabric8.api.FabricService;
import io.fabric8.api.Profile;
import io.fabric8.common.util.MultiException;
import io.fabric8.common.util.Strings;
import io.fabric8.service.VersionPropertyPointerResolver;
import io.fabric8.utils.features.FeatureUtils;
import org.apache.karaf.features.BundleInfo;
import org.apache.karaf.features.Feature;
import org.apache.karaf.features.Repository;
import org.apache.karaf.features.internal.FeatureValidationUtil;
import org.apache.karaf.features.internal.RepositoryImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static io.fabric8.utils.PatchUtils.extractUrl;
public class AgentUtils {
private static final Logger LOGGER = LoggerFactory.getLogger(AgentUtils.class);
public static final String FAB_PROTOCOL = "fab:";
public static final String REQ_PROTOCOL = "req:";
/**
* Returns the location and parser map (i.e. the location and the parsed maven coordinates and artifact locations) of each bundle and feature
* of the given profile
*/
public static Map<String, Parser> getProfileArtifacts(FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
List<String> bundles = profile.getBundles();
Set<Feature> features = new HashSet<Feature>();
addFeatures(features, fabricService, downloadManager, profile);
return getProfileArtifacts(fabricService, profile, bundles, features);
}
/**
* Returns the location and parser map (i.e. the location and the parsed maven coordinates and artifact locations) of each bundle and feature
*/
public static Map<String, Parser> getProfileArtifacts(FabricService fabricService, Profile profile, Iterable<String> bundles, Iterable<Feature> features) {
Set<String> locations = new HashSet<String>();
for (Feature feature : features) {
List<BundleInfo> bundleList = feature.getBundles();
if (bundleList == null) {
LOGGER.warn("No bundles for feature " + feature);
} else {
for (BundleInfo bundle : bundleList) {
locations.add(bundle.getLocation());
}
}
}
for (String bundle : bundles) {
locations.add(bundle);
}
Map<String,Parser> artifacts = new HashMap<String, Parser>();
for (String location : locations) {
try {
if (location.contains("$")) {
location = VersionPropertyPointerResolver.replaceVersions(fabricService, profile.getOverlay().getConfigurations(), location);
}
Parser parser = Parser.parsePathWithSchemePrefix(location);
artifacts.put(location, parser);
} catch (MalformedURLException e) {
LOGGER.error("Failed to parse bundle URL: " + location + ". " + e, e);
}
}
return artifacts;
}
public static void addRepository(DownloadManager manager, Map<URI, Repository> repositories, URI uri) throws Exception {
if (!repositories.containsKey(uri)) {
File file = manager.download(uri.toString()).await().getFile();
FeatureValidationUtil.validate(file.toURI());
//We use the file URI instead of the Maven URL because we want to make sure that the repository can load.
//If we used the Maven URI instead, we would have to make sure that the download location is added to
//the ops4j pax url configuration. Using the file URI is safer and less prone to misconfiguration.
RepositoryImpl repo = new RepositoryImpl(file.toURI());
repositories.put(uri, repo);
repo.load();
for (URI ref : repo.getRepositories()) {
addRepository(manager, repositories, ref);
}
}
}
/**
 * Extracts the {@link java.net.URI}/{@link org.apache.karaf.features.Repository} map from the profile.
 *
 * @param fabricService the fabric service used to resolve version expressions in repository URLs
 * @param downloadManager the download manager used to fetch the feature repositories
 * @param profile the profile whose repository URLs are resolved
 * @return the map of repository URI to loaded {@link org.apache.karaf.features.Repository}
 * @throws Exception if the profile repositories cannot be resolved
 */
protected static Map<URI, Repository> getRepositories(FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
Map<URI, Repository> repositories = new HashMap<URI, Repository>();
for (String repositoryUrl : profile.getRepositories()) {
if (Strings.isNotBlank(repositoryUrl)) {
try {
// lets replace any version expressions
String replacedUrl = repositoryUrl;
if (repositoryUrl.contains("$")) {
replacedUrl = VersionPropertyPointerResolver.replaceVersions(fabricService, profile.getConfigurations(), repositoryUrl);
}
URI repoUri = new URI(replacedUrl);
addRepository(downloadManager, repositories, repoUri);
} catch (Exception e) {
LOGGER.warn("Failed to add repository " + repositoryUrl + " for profile " + profile.getId() + ". " + e);
}
}
}
return repositories;
}
/**
 * Adds the features of the given profile, including their dependencies, to the given set.
 *
 * @param features the set the resolved features are added to
 * @param fabricService the fabric service used to resolve the profile repositories
 * @param downloadManager the download manager used to fetch the feature repositories
 * @param profile the profile whose features are resolved
 * @throws Exception if the profile repositories cannot be loaded
 */
public static void addFeatures(Set<Feature> features, FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
List<String> featureNames = profile.getFeatures();
Map<URI, Repository> repositories = getRepositories(fabricService, downloadManager, profile);
for (String featureName : featureNames) {
Feature feature = FeatureUtils.search(featureName, repositories.values());
if (feature == null) {
LOGGER.warn("Could not find feature " + featureName
+ " for profile " + profile.getId()
+ " in repositories " + repositories.keySet());
} else {
features.addAll(expandFeature(feature, repositories));
}
}
}
public static Set<Feature> expandFeature(Feature feature, Map<URI, Repository> repositories) {
Set<Feature> features = new HashSet<Feature>();
for (Feature f : feature.getDependencies()) {
Feature loaded = FeatureUtils.search(f.getName(), repositories.values());
features.addAll(expandFeature(loaded, repositories));
}
features.add(feature);
return features;
}
public static Map<String, Repository> loadRepositories(DownloadManager manager, Set<String> uris) throws Exception {
RepositoryDownloader downloader = new RepositoryDownloader(manager);
downloader.download(uris);
return downloader.await();
}
/**
* Downloads all the bundles and features for the given profile
*/
public static Map<String, File> downloadProfileArtifacts(FabricService fabricService, DownloadManager downloadManager, Profile profile) throws Exception {
List<String> bundles = profile.getBundles();
Set<Feature> features = new HashSet<Feature>();
addFeatures(features, fabricService, downloadManager, profile);
return downloadBundles(downloadManager, features, bundles, Collections.EMPTY_SET);
}
public static Map<String, File> downloadBundles(DownloadManager manager, Iterable<Feature> features, Iterable<String> bundles, Set<String> overrides) throws Exception {
Set<String> locations = new HashSet<String>();
for (Feature feature : features) {
for (BundleInfo bundle : feature.getBundles()) {
locations.add(bundle.getLocation());
}
}
for (String bundle : bundles) {
locations.add(bundle);
}
for (String override : overrides) {
locations.add(extractUrl(override));
}
return downloadLocations(manager, locations);
}
public static Map<String, File> downloadLocations(DownloadManager manager, Collection<String> locations) throws MalformedURLException, InterruptedException, MultiException {
FileDownloader downloader = new FileDownloader(manager);
downloader.download(locations);
return downloader.await();
}
public static void addMavenProxies(Dictionary props, FabricService fabricService) {
try {
if (fabricService != null) {
StringBuilder sb = new StringBuilder();
for (URI uri : fabricService.getMavenRepoURIs()) {
String mavenRepo = uri.toString();
if (!mavenRepo.endsWith("/")) {
mavenRepo += "/";
}
if (sb.length() > 0) {
sb.append(",");
}
sb.append(mavenRepo);
sb.append("@snapshots");
}
String existingRepos = (String) props.get("org.ops4j.pax.url.mvn.repositories");
if (existingRepos != null) {
if (sb.length() > 0) {
sb.append(",");
}
sb.append(existingRepos);
}
props.put("org.ops4j.pax.url.mvn.repositories", sb.toString());
}
} catch (Exception e) {
LOGGER.warn("Unable to retrieve maven proxy urls: " + e.getMessage());
LOGGER.debug("Unable to retrieve maven proxy urls: " + e.getMessage(), e);
}
}
public interface DownloadCallback {
public void downloaded(File file) throws Exception;
}
public static abstract class ArtifactDownloader<T> {
protected final DownloadManager manager;
protected final Object lock = new Object();
protected final ConcurrentMap<String, DownloadFuture> futures = new ConcurrentHashMap<String, DownloadFuture>();
protected final ConcurrentMap<String, T> artifacts = new ConcurrentHashMap<String, T>();
protected final List<Throwable> errors = new CopyOnWriteArrayList<Throwable>();
protected final AtomicInteger pendings = new AtomicInteger();
public ArtifactDownloader(DownloadManager manager) {
this.manager = manager;
}
public void download(String uri, final DownloadCallback callback) throws MalformedURLException {
synchronized (lock) {
DownloadFuture future = futures.get(uri);
if (future == null) {
pendings.incrementAndGet();
future = manager.download(uri);
future.addListener(new FutureListener<DownloadFuture>() {
@Override
public void operationComplete(DownloadFuture future) {
onDownloaded(future, callback);
}
});
futures.put(uri, future);
}
}
}
public DownloadFuture download(String uri) throws MalformedURLException {
synchronized (lock) {
DownloadFuture future = futures.get(uri);
if (future == null) {
pendings.incrementAndGet();
future = manager.download(uri);
future.addListener(new FutureListener<DownloadFuture>() {
@Override
public void operationComplete(DownloadFuture future) {
onDownloaded(future, null);
}
});
futures.put(uri, future);
}
return future;
}
}
protected void onDownloaded(DownloadFuture future, DownloadCallback callback) {
synchronized (lock) {
try {
String url = future.getUrl();
File file = future.getFile();
if (file != null) {
T t = getArtifact(url, file);
artifacts.put(url, t);
if (callback != null) {
callback.downloaded(file);
}
}
} catch (Throwable t) {
errors.add(t);
} finally {
pendings.decrementAndGet();
lock.notifyAll();
}
}
}
protected abstract T getArtifact(String uri, File file) throws Exception;
public void download(Iterable<String> uris) throws MalformedURLException {
for (String uri : uris) {
download(uri);
}
}
public Map<String, T> await() throws InterruptedException, MultiException {
synchronized (lock) {
while (pendings.get() > 0) {
lock.wait();
}
if (!errors.isEmpty()) {
StringWriter sw = new StringWriter();
int nr = 1;
int pad = Integer.toString(errors.size()).length();
for (Throwable t : errors) {
sw.append(String.format("%n\t%0" + pad + "d: %s", nr++, t.getMessage()));
}
LOGGER.error("Summary of errors while downloading artifacts:" + sw.toString());
throw new MultiException(String.format("Error%s while downloading artifacts:%s", errors.size() == 1 ? "" : "s", sw.toString()), errors);
}
return artifacts;
}
}
}
public static class RepositoryDownloader extends ArtifactDownloader<Repository> {
public RepositoryDownloader(DownloadManager manager) {
super(manager);
}
@Override
protected Repository getArtifact(String uri, File file) throws Exception {
FeatureValidationUtil.validate(file.toURI());
//We use the file URI instead of the Maven URL because we want to make sure that the repository can load.
//If we used the Maven URI instead, we would have to make sure that the download location is added to
//the ops4j pax url configuration. Using the file URI is safer and less prone to misconfiguration.
RepositoryImpl repo = new RepositoryImpl(file.toURI());
repo.load();
for (URI ref : repo.getRepositories()) {
download(ref.toString());
}
return repo;
}
}
public static class FileDownloader extends ArtifactDownloader<File> {
public FileDownloader(DownloadManager manager) {
super(manager);
}
@Override
protected File getArtifact(String uri, File file) throws Exception {
return file;
}
}
}
|
avoid exception if we define bundles/features using non-mvn coordinates
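A minimal standalone sketch of the guard this message describes: Maven-coordinate parsing is only attempted for locations that actually use the mvn: scheme, either directly or behind a wrapper prefix such as wrap:mvn:, so plain http or file URLs no longer produce a MalformedURLException in the log. The class name and sample locations below are hypothetical; only the startsWith/contains condition mirrors the diff.

import java.util.Arrays;
import java.util.List;

public class MvnLocationGuardExample {
    // Mirrors the condition added in the diff: only mvn-style locations are parsed as Maven coordinates.
    static boolean looksLikeMvnCoordinates(String location) {
        return location.startsWith("mvn:") || location.contains(":mvn:");
    }

    public static void main(String[] args) {
        List<String> locations = Arrays.asList(
            "mvn:io.fabric8/fabric-api/1.2.0",      // plain mvn coordinates -> parse
            "wrap:mvn:org.example/legacy-lib/1.0",  // wrapped mvn coordinates -> parse
            "http://example.com/bundles/custom.jar" // non-mvn location -> skip parsing
        );
        for (String location : locations) {
            System.out.println((looksLikeMvnCoordinates(location) ? "parse " : "skip  ") + location);
        }
    }
}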
|
fabric/fabric-agent/src/main/java/io/fabric8/agent/utils/AgentUtils.java
|
avoid exception if we define bundles/features using non-mvn coordinates
|
|
Java
|
apache-2.0
|
6a27330c2447090225578bff6f67d3523a85ef11
| 0
|
nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch
|
public class Tile
{
public static final int TOP = 0;
public static final int LEFT = 1;
public static final int BOTTOM = 2;
public static final int RIGHT = 3;
public Tile (long number, String[] data)
{
_id = number;
_data = new char[data.length][data[0].length()];
_originalState = new char[data.length][data[0].length()];
for (int i = 0; i < data.length; i++)
{
for (int j = 0; j < data[0].length(); j++)
{
_data[i][j] = data[i].charAt(j);
_originalState[i][j] = data[i].charAt(j);
}
}
_freeze = false; // should the tile move?
_isConnected = new boolean[4]; // edges connected?
_connections = new long[4]; // if so, which ones?
for (int i = 0; i < 4; i++)
{
_isConnected[i] = false; // which edges are connected?
_connections[i] = 0;
}
_numberOfConnections = 0;
}
public final void removeBorders ()
{
// remove
}
/*
* Check for this pattern ...
*
* #
* # ## ## ###
* # # # # # #
*/
public final boolean hasSeaMonster (int x, int y)
{
if ((_data[y + 1][x + 1] == TileData.HASH) && (_data[y + 1][x + 4] == TileData.HASH)
&& (_data[y][x + 5] == TileData.HASH) && (_data[y][x + 6] == TileData.HASH)
&& (_data[y + 1][x + 7] == TileData.HASH) && (_data[y + 1][x + 10] == TileData.HASH)
&& (_data[y][x + 11] == TileData.HASH) && (_data[y][x + 12] == TileData.HASH)
&& (_data[y + 1][x + 13] == TileData.HASH) && (_data[y + 1][x + 16] == TileData.HASH)
&& (_data[y][x + 17] == TileData.HASH) && (_data[y][x + 18] == TileData.HASH)
&& (_data[y][x + 19] == TileData.HASH) && (_data[y - 1][x + 18] == TileData.HASH))
{
return true;
}
return false;
}
public final long getID ()
{
return _id;
}
public final boolean isFrozen ()
{
return _freeze;
}
public final void freeze ()
{
_freeze = true;
}
public final boolean[] getConnectionStatuses ()
{
return _isConnected;
}
public final long[] getConnections ()
{
return _connections;
}
/*
 * Rotates the tile 90 degrees clockwise. Call it
 * multiple times to rotate through all four
 * orientations.
*/
public void rotate ()
{
int x = _data.length;
int y = _data[0].length;
char[][] temp = new char[x][y];
for (int i = 0; i < x; i++)
{
for (int j = 0; j < y; j++)
{
temp[j][x - 1 - i] = _data[i][j];
}
}
_data = temp;
}
/*
 * Flips the tile left to right (mirrors each row).
*/
public void invert ()
{
int x = _data.length;
int y = _data[0].length;
char[][] temp = new char[x][y];
for (int i = 0; i < x; i++)
{
for (int j = 0; j < y; j++)
{
temp[i][_data[i].length - j - 1] = _data[i][j];
}
}
_data = temp;
}
public boolean connectTopToBottom (Tile toCheck)
{
// check edges match
// store the edges separately again?
System.out.println("connectTopToBottom "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[0][i] != toCheck._data[_data.length - 1][i])
return false;
}
connect(toCheck, TOP, BOTTOM);
return true;
}
public boolean connectBottomToTop (Tile toCheck)
{
// check edges match
System.out.println("connectBottomToTop "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[_data.length - 1][i] != toCheck._data[0][i])
return false;
}
connect(toCheck, BOTTOM, TOP);
return true;
}
public boolean connectLeftToRight (Tile toCheck)
{
// check edges match
System.out.println("connectLeftToRight "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[i][0] != toCheck._data[i][_data.length - 1])
return false;
}
connect(toCheck, LEFT, RIGHT);
return true;
}
public boolean connectRightToLeft (Tile toCheck)
{
// check edges match
System.out.println("connectRightToLeft "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[i][_data.length - 1] != toCheck._data[i][0])
return false;
}
connect(toCheck, RIGHT, LEFT);
return true;
}
private final void connect (Tile toCheck, int thisEdge, int otherEdge)
{
if ((_connections[thisEdge] == 0) && (toCheck._connections[otherEdge] == 0))
{
toCheck.freeze();
freeze();
_connections[thisEdge] = toCheck.getID();
_isConnected[thisEdge] = true;
_numberOfConnections++;
toCheck._connections[otherEdge] = getID();
toCheck._isConnected[otherEdge] = true;
toCheck._numberOfConnections++;
}
}
@Override
public boolean equals (Object obj)
{
if (obj == null)
return false;
if (this == obj)
return true;
if (getClass() == obj.getClass())
{
Tile temp = (Tile) obj;
return (_id == temp._id);
}
return false;
}
@Override
public String toString ()
{
String str = TileData.TILE_ID+_id+":\n";
for (int i = 0; i < _data.length; i++)
{
for (int j = 0; j < _data[0].length; j++)
{
str += _data[i][j];
}
str += "\n";
}
return str;
}
private long _id;
private char[][] _data;
private char[][] _originalState;
private boolean _freeze;
private boolean[] _isConnected;
private long[] _connections;
private int _numberOfConnections;
}
|
AdventOfCode/2020/day20/part2/Tile.java
|
public class Tile
{
public static final int TOP = 0;
public static final int LEFT = 1;
public static final int BOTTOM = 2;
public static final int RIGHT = 3;
public Tile (long number, String[] data)
{
_id = number;
_data = new char[data.length][data[0].length()];
_originalState = new char[data.length][data[0].length()];
for (int i = 0; i < data.length; i++)
{
for (int j = 0; j < data[0].length(); j++)
{
_data[i][j] = data[i].charAt(j);
_originalState[i][j] = data[i].charAt(j);
}
}
_freeze = false; // should the tile move?
_isConnected = new boolean[4]; // edges connected?
_connections = new long[4]; // if so, which ones?
for (int i = 0; i < 4; i++)
{
_isConnected[i] = false; // which edges are connected?
_connections[i] = 0;
}
_numberOfConnections = 0;
}
public final void removeBorders ()
{
// remove
}
/*
* Check for this pattern ...
*
* #
* # ## ## ###
* # # # # # #
*/
public final boolean hasSeaMonster (int x, int y)
{
if ((_data[y + 1][x + 1] == TileData.HASH) && (_data[y + 1][x + 4] == TileData.HASH)
&& (_data[y][x + 5] == TileData.HASH) && (_data[y][x + 6] == TileData.HASH)
&& (_data[y + 1][x + 7] == TileData.HASH) && (_data[y + 1][x + 10] == TileData.HASH)
&& (_data[y][x + 11] == TileData.HASH) && (_data[y][x + 12] == TileData.HASH)
&& (_data[y + 1][x + 13] == TileData.HASH) && (_data[y + 1][x + 16] == TileData.HASH)
&& (_data[y][x + 17] == TileData.HASH) && (_data[y][x + 18] == TileData.HASH)
&& (_data[y][x + 19] == TileData.HASH) && (_data[y - 1][x + 18] == TileData.HASH))
{
return true;
}
return false;
}
public final long getID ()
{
return _id;
}
public final boolean isFrozen ()
{
return _freeze;
}
public final void freeze ()
{
_freeze = true;
}
public final boolean[] getConnectionStatuses ()
{
return _isConnected;
}
public final long[] getConnections ()
{
return _connections;
}
/*
 * Rotates the tile 90 degrees clockwise. Call it
 * multiple times to rotate through all four
 * orientations.
*/
public void rotate ()
{
int x = _data.length;
int y = _data[0].length;
char[][] temp = new char[x][y];
for (int i = 0; i < x; i++)
{
for (int j = 0; j < y; j++)
{
temp[j][x - 1 - i] = _data[i][j];
}
}
_data = temp;
}
/*
 * Flips the tile left to right (mirrors each row).
*/
public void invert ()
{
int x = _data.length;
int y = _data[0].length;
char[][] temp = new char[x][y];
for (int i = 0; i < x; i++)
{
for (int j = 0; j < y; j++)
{
temp[i][_data[i].length - j - 1] = _data[i][j];
}
}
_data = temp;
}
public boolean connectTopToBottom (Tile toCheck)
{
// check edges match
// store the edges separately again?
System.out.println("connectTopToBottom "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[0][i] != toCheck._data[_data.length - 1][i])
return false;
}
connect(toCheck, TOP, BOTTOM);
return true;
}
public boolean connectBottomToTop (Tile toCheck)
{
// check edges match
System.out.println("connectBottomToTop "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[_data.length - 1][i] != toCheck._data[0][i])
return false;
}
connect(toCheck, BOTTOM, TOP);
return true;
}
public boolean connectLeftToRight (Tile toCheck)
{
// check edges match
System.out.println("connectLeftToRight "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[i][0] != toCheck._data[i][_data.length - 1])
return false;
}
connect(toCheck, LEFT, RIGHT);
return true;
}
public boolean connectRightToLeft (Tile toCheck)
{
// check edges match
System.out.println("connectRightToLeft "+toCheck);
for (int i = 0; i < _data.length; i++)
{
if (_data[i][_data.length - 1] != toCheck._data[i][0])
return false;
}
connect(toCheck, RIGHT, LEFT);
return true;
}
private final void connect (Tile toCheck, int thisEdge, int otherEdge)
{
if ((_connections[thisEdge] == 0) && (toCheck._connections[otherEdge] == 0))
{
_connections[thisEdge] = toCheck.getID();
_isConnected[thisEdge] = true;
_numberOfConnections++;
toCheck._connections[otherEdge] = getID();
toCheck._isConnected[otherEdge] = true;
toCheck._numberOfConnections++;
}
}
@Override
public boolean equals (Object obj)
{
if (obj == null)
return false;
if (this == obj)
return true;
if (getClass() == obj.getClass())
{
Tile temp = (Tile) obj;
return (_id == temp._id);
}
return false;
}
@Override
public String toString ()
{
String str = TileData.TILE_ID+_id+":\n";
for (int i = 0; i < _data.length; i++)
{
for (int j = 0; j < _data[0].length; j++)
{
str += _data[i][j];
}
str += "\n";
}
return str;
}
private long _id;
private char[][] _data;
private char[][] _originalState;
private boolean _freeze;
private boolean[] _isConnected;
private long[] _connections;
private int _numberOfConnections;
}
|
Update Tile.java
|
AdventOfCode/2020/day20/part2/Tile.java
|
Update Tile.java
|
|
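The Tile class above explores tile orientations by calling rotate() and invert() repeatedly. As a minimal standalone sketch (my illustration, not part of the commit above; the class name OrientationDemo and the 3x3 sample grid are invented), the code below uses the same index mapping to enumerate all eight orientations of a square grid: four rotations of the original followed by four rotations of its mirror image.
public class OrientationDemo {
    // Rotate a square grid 90 degrees clockwise; same mapping as Tile.rotate().
    static char[][] rotate(char[][] g) {
        int n = g.length;
        char[][] out = new char[n][n];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                out[j][n - 1 - i] = g[i][j];
            }
        }
        return out;
    }
    // Mirror a square grid about its vertical axis; same mapping as Tile.invert().
    static char[][] flip(char[][] g) {
        int n = g.length;
        char[][] out = new char[n][n];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                out[i][n - 1 - j] = g[i][j];
            }
        }
        return out;
    }
    static void print(char[][] g) {
        for (char[] row : g) {
            System.out.println(new String(row));
        }
        System.out.println();
    }
    public static void main(String[] args) {
        char[][] grid = {
            {'#', '.', '.'},
            {'#', '#', '.'},
            {'.', '.', '#'},
        };
        // Four rotations of the grid, then four rotations of its mirror image,
        // cover every orientation a square tile can take.
        for (int flips = 0; flips < 2; flips++) {
            for (int turns = 0; turns < 4; turns++) {
                print(grid);
                grid = rotate(grid);
            }
            grid = flip(grid);
        }
    }
}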
Java
|
apache-2.0
|
773e05a9aa897edb3c90c3d4b5d1aab81504470d
| 0
|
aefimov/idea-jflex
|
package org.intellij.lang.jflex.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.LiteralTextEscaper;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import org.intellij.lang.jflex.JFlexElementTypes;
import org.intellij.lang.jflex.injection.EmbeddedJavaLiteralTextEscaper;
import org.intellij.lang.jflex.psi.JFlexJavaCode;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* Created by IntelliJ IDEA.
* User: Max
* Date: 15.03.2008
* Time: 18:51:14
*/
public class JFlexJavaCodeImpl extends JFlexElementImpl implements JFlexJavaCode {
public JFlexJavaCodeImpl(@NotNull ASTNode node) {
super(node);
}
public boolean isMatchAction() {
ASTNode prev = getNode().getTreePrev();
return prev != null && prev.getElementType() == JFlexElementTypes.LEFT_BRACE;
}
@Nullable
@Deprecated
public List<Pair<PsiElement, TextRange>> getInjectedPsi() {
return InjectedLanguageUtil.getInjectedPsiFiles(this);
}
public void processInjectedPsi(@NotNull InjectedPsiVisitor visitor) {
InjectedLanguageUtil.enumerate(this, visitor);
}
public PsiLanguageInjectionHost updateText(@NotNull String text) {
return this;
}
public void fixText(@NotNull String text) {
}
@NotNull
public LiteralTextEscaper<JFlexJavaCode> createLiteralTextEscaper() {
return new EmbeddedJavaLiteralTextEscaper(this);
}
}
|
src/org/intellij/lang/jflex/psi/impl/JFlexJavaCodeImpl.java
|
package org.intellij.lang.jflex.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.LiteralTextEscaper;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiLanguageInjectionHost;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import org.intellij.lang.jflex.JFlexElementTypes;
import org.intellij.lang.jflex.injection.EmbeddedJavaLiteralTextEscaper;
import org.intellij.lang.jflex.psi.JFlexJavaCode;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* Created by IntelliJ IDEA.
* User: Max
* Date: 15.03.2008
* Time: 18:51:14
*/
public class JFlexJavaCodeImpl extends JFlexElementImpl implements JFlexJavaCode {
public JFlexJavaCodeImpl(@NotNull ASTNode node) {
super(node);
}
public boolean isMatchAction() {
ASTNode prev = getNode().getTreePrev();
return prev != null && prev.getElementType() == JFlexElementTypes.LEFT_BRACE;
}
@Nullable
@Deprecated
public List<Pair<PsiElement, TextRange>> getInjectedPsi() {
return InjectedLanguageUtil.getInjectedPsiFiles(this);
}
public void processInjectedPsi(@NotNull InjectedPsiVisitor visitor) {
InjectedLanguageUtil.enumerate(this, visitor);
}
public PsiLanguageInjectionHost updateText(@NotNull String text) {
return this;
}
public void fixText(@NotNull String text) {
}
@NotNull
public LiteralTextEscaper createLiteralTextEscaper() {
return new EmbeddedJavaLiteralTextEscaper(this);
}
}
|
createLiteralTextEscaper method signature tweaked to eliminate unchecked warning
|
src/org/intellij/lang/jflex/psi/impl/JFlexJavaCodeImpl.java
|
createLiteralTextEscaper method signature tweaked to eliminate unchecked warning
|
|
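The commit above parameterizes the return type of createLiteralTextEscaper() to remove an unchecked warning. The following self-contained sketch illustrates the general generics pattern only; Escaper, Host, and JavaHost are invented stand-ins, not the IntelliJ Platform API.
class GenericsWarningDemo {
    interface Host {}
    static class JavaHost implements Host {}
    // A helper that is generic in the host type it works on.
    static class Escaper<T extends Host> {
        Escaper(T host) {}
    }
    // Raw return type: assigning the result to Escaper<JavaHost> requires an
    // unchecked conversion, which javac flags under -Xlint:unchecked.
    @SuppressWarnings("rawtypes")
    static Escaper createRaw(JavaHost host) {
        return new Escaper<JavaHost>(host);
    }
    // Parameterized return type: the caller gets a fully typed value, no warning.
    static Escaper<JavaHost> createTyped(JavaHost host) {
        return new Escaper<>(host);
    }
    public static void main(String[] args) {
        JavaHost host = new JavaHost();
        Escaper<JavaHost> a = createRaw(host);   // unchecked conversion warning here
        Escaper<JavaHost> b = createTyped(host); // compiles cleanly
        System.out.println(a != null && b != null);
    }
}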
Java
|
apache-2.0
|
1f8979546f14d9fe7eee69d13c6dd4967f5c8aa7
| 0
|
permazen/permazen,archiecobbs/jsimpledb,permazen/permazen,permazen/permazen,archiecobbs/jsimpledb,archiecobbs/jsimpledb
|
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.kv.sql;
import com.google.common.base.Preconditions;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.NoSuchElementException;
import java.util.concurrent.Future;
import org.jsimpledb.kv.AbstractKVStore;
import org.jsimpledb.kv.CloseableKVStore;
import org.jsimpledb.kv.KVPair;
import org.jsimpledb.kv.KVStore;
import org.jsimpledb.kv.KVTransaction;
import org.jsimpledb.kv.KVTransactionException;
import org.jsimpledb.kv.StaleTransactionException;
import org.jsimpledb.kv.mvcc.MutableView;
import org.jsimpledb.kv.util.ForwardingKVStore;
import org.jsimpledb.util.ByteUtil;
import org.jsimpledb.util.CloseableIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@link SQLKVDatabase} transaction.
*/
public class SQLKVTransaction extends ForwardingKVStore implements KVTransaction {
protected final Logger log = LoggerFactory.getLogger(this.getClass());
protected final SQLKVDatabase database;
protected final Connection connection;
private long timeout;
private boolean readOnly;
private KVStore view;
private boolean closed;
private boolean stale;
/**
* Constructor.
*
* @param database the associated database
* @param connection the {@link Connection} for the transaction
* @throws SQLException if an SQL error occurs
*/
public SQLKVTransaction(SQLKVDatabase database, Connection connection) throws SQLException {
Preconditions.checkArgument(database != null, "null database");
Preconditions.checkArgument(connection != null, "null connection");
this.database = database;
this.connection = connection;
}
@Override
public SQLKVDatabase getKVDatabase() {
return this.database;
}
@Override
public void setTimeout(long timeout) {
Preconditions.checkArgument(timeout >= 0, "timeout < 0");
this.timeout = timeout;
}
/**
* Watch a key to monitor for changes in its value.
*
* <p>
* The implementation in {@link SQLKVTransaction} always throws {@link UnsupportedOperationException}.
* Subclasses may add support using a database-specific notification mechanism.
*
* @param key {@inheritDoc}
* @return {@inheritDoc}
* @throws StaleTransactionException {@inheritDoc}
* @throws org.jsimpledb.kv.RetryTransactionException {@inheritDoc}
* @throws org.jsimpledb.kv.KVDatabaseException {@inheritDoc}
* @throws UnsupportedOperationException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
@Override
public Future<Void> watchKey(byte[] key) {
throw new UnsupportedOperationException();
}
private synchronized byte[] getSQL(byte[] key) {
if (this.stale)
throw new StaleTransactionException(this);
Preconditions.checkArgument(key != null, "null key");
return this.queryBytes(StmtType.GET, key);
}
private synchronized KVPair getAtLeastSQL(byte[] minKey, byte[] maxKey) {
if (this.stale)
throw new StaleTransactionException(this);
return minKey != null && minKey.length > 0 ?
(maxKey != null ?
this.queryKVPair(StmtType.GET_RANGE_FORWARD_SINGLE, minKey, maxKey) :
this.queryKVPair(StmtType.GET_AT_LEAST_FORWARD_SINGLE, minKey)) :
(maxKey != null ?
this.queryKVPair(StmtType.GET_AT_MOST_FORWARD_SINGLE, maxKey) :
this.queryKVPair(StmtType.GET_FIRST));
}
private synchronized KVPair getAtMostSQL(byte[] maxKey, byte[] minKey) {
if (this.stale)
throw new StaleTransactionException(this);
return maxKey != null ?
(minKey != null && minKey.length > 0 ?
this.queryKVPair(StmtType.GET_RANGE_REVERSE_SINGLE, minKey, maxKey) :
this.queryKVPair(StmtType.GET_AT_MOST_REVERSE_SINGLE, maxKey)) :
(minKey != null && minKey.length > 0 ?
this.queryKVPair(StmtType.GET_AT_LEAST_REVERSE_SINGLE, minKey) :
this.queryKVPair(StmtType.GET_LAST));
}
private synchronized CloseableIterator<KVPair> getRangeSQL(byte[] minKey, byte[] maxKey, boolean reverse) {
if (this.stale)
throw new StaleTransactionException(this);
if (minKey != null && minKey.length == 0)
minKey = null;
if (minKey == null && maxKey == null)
return this.queryIterator(reverse ? StmtType.GET_ALL_REVERSE : StmtType.GET_ALL_FORWARD);
if (minKey == null)
return this.queryIterator(reverse ? StmtType.GET_AT_MOST_REVERSE : StmtType.GET_AT_MOST_FORWARD, maxKey);
if (maxKey == null)
return this.queryIterator(reverse ? StmtType.GET_AT_LEAST_REVERSE : StmtType.GET_AT_LEAST_FORWARD, minKey);
else
return this.queryIterator(reverse ? StmtType.GET_RANGE_REVERSE : StmtType.GET_RANGE_FORWARD, minKey, maxKey);
}
private synchronized void putSQL(byte[] key, byte[] value) {
Preconditions.checkArgument(key != null, "null key");
Preconditions.checkArgument(value != null, "null value");
if (this.stale)
throw new StaleTransactionException(this);
this.update(StmtType.PUT, key, value, value);
}
private synchronized void removeSQL(byte[] key) {
Preconditions.checkArgument(key != null, "null key");
if (this.stale)
throw new StaleTransactionException(this);
this.update(StmtType.REMOVE, key);
}
private synchronized void removeRangeSQL(byte[] minKey, byte[] maxKey) {
if (this.stale)
throw new StaleTransactionException(this);
if (minKey != null && minKey.length == 0)
minKey = null;
if (minKey == null && maxKey == null)
this.update(StmtType.REMOVE_ALL);
else if (minKey == null)
this.update(StmtType.REMOVE_AT_MOST, maxKey);
else if (maxKey == null)
this.update(StmtType.REMOVE_AT_LEAST, minKey);
else
this.update(StmtType.REMOVE_RANGE, minKey, maxKey);
}
@Override
public synchronized boolean isReadOnly() {
return this.readOnly;
}
@Override
public synchronized void setReadOnly(boolean readOnly) {
Preconditions.checkState(this.view == null || readOnly == this.readOnly, "data already accessed");
this.readOnly = readOnly;
}
@Override
protected synchronized KVStore delegate() {
if (this.view == null) {
this.view = new SQLView();
if (this.readOnly && !this.database.rollbackForReadOnly)
this.view = new MutableView(this.view);
}
return this.view;
}
@Override
public synchronized void commit() {
if (this.stale)
throw new StaleTransactionException(this);
this.stale = true;
try {
if (this.readOnly && !(this.view instanceof MutableView))
this.connection.rollback();
else
this.connection.commit();
} catch (SQLException e) {
throw this.handleException(e);
} finally {
this.closeConnection();
}
}
@Override
public synchronized void rollback() {
if (this.stale)
return;
this.stale = true;
try {
this.connection.rollback();
} catch (SQLException e) {
throw this.handleException(e);
} finally {
this.closeConnection();
}
}
@Override
public CloseableKVStore mutableSnapshot() {
throw new UnsupportedOperationException();
}
/**
* Handle an unexpected SQL exception.
*
* <p>
* The implementation in {@link SQLKVTransaction} rolls back the SQL transaction, closes the associated {@link Connection},
* and wraps the exception via {@link SQLKVDatabase#wrapException SQLKVDatabase.wrapException()}.
*
* @param e original exception
* @return key/value transaction exception
*/
protected KVTransactionException handleException(SQLException e) {
this.stale = true;
try {
this.connection.rollback();
} catch (SQLException e2) {
// ignore
} finally {
this.closeConnection();
}
return this.database.wrapException(this, e);
}
/**
* Close the {@link Connection} associated with this instance, if it's not already closed.
* This method is idempotent.
*/
protected void closeConnection() {
if (this.closed)
return;
this.closed = true;
try {
this.connection.close();
} catch (SQLException e) {
// ignore
}
}
@Override
protected void finalize() throws Throwable {
try {
if (!this.stale)
this.log.warn(this + " leaked without commit() or rollback()");
this.closeConnection();
} finally {
super.finalize();
}
}
// Helper methods
protected byte[] queryBytes(StmtType stmtType, byte[]... params) {
return this.query(stmtType, (stmt, rs) -> rs.next() ? rs.getBytes(1) : null, true, params);
}
protected KVPair queryKVPair(StmtType stmtType, byte[]... params) {
return this.query(stmtType, (stmt, rs) -> rs.next() ? new KVPair(rs.getBytes(1), rs.getBytes(2)) : null, true, params);
}
protected CloseableIterator<KVPair> queryIterator(StmtType stmtType, byte[]... params) {
return this.query(stmtType, ResultSetIterator::new, false, params);
}
protected <T> T query(StmtType stmtType, ResultSetFunction<T> resultSetFunction, boolean close, byte[]... params) {
try {
final PreparedStatement preparedStatement = stmtType.create(this.database, this.connection, this.log);
final int numParams = preparedStatement.getParameterMetaData().getParameterCount();
for (int i = 0; i < params.length && i < numParams; i++) {
if (this.log.isTraceEnabled())
this.log.trace("setting ?" + (i + 1) + " = " + ByteUtil.toString(params[i]));
preparedStatement.setBytes(i + 1, params[i]);
}
preparedStatement.setQueryTimeout((int)((this.timeout + 999) / 1000));
if (this.log.isTraceEnabled())
this.log.trace("executing SQL query: " + preparedStatement + " in " + this);
final ResultSet resultSet = preparedStatement.executeQuery();
final T result = resultSetFunction.apply(preparedStatement, resultSet);
if (close) {
resultSet.close();
preparedStatement.close();
}
return result;
} catch (SQLException e) {
throw this.handleException(e);
}
}
protected void update(StmtType stmtType, byte[]... params) {
try (final PreparedStatement preparedStatement = stmtType.create(this.database, this.connection, this.log)) {
final int numParams = preparedStatement.getParameterMetaData().getParameterCount();
for (int i = 0; i < params.length && i < numParams; i++) {
if (this.log.isTraceEnabled())
this.log.trace("setting ?" + (i + 1) + " = " + ByteUtil.toString(params[i]));
preparedStatement.setBytes(i + 1, params[i]);
}
preparedStatement.setQueryTimeout((int)((this.timeout + 999) / 1000));
if (this.log.isTraceEnabled())
this.log.trace("executing SQL update: " + preparedStatement + " in " + this);
preparedStatement.executeUpdate();
} catch (SQLException e) {
throw this.handleException(e);
}
}
// SQLView
private class SQLView extends AbstractKVStore {
@Override
public byte[] get(byte[] key) {
return SQLKVTransaction.this.getSQL(key);
}
@Override
public KVPair getAtLeast(byte[] minKey, byte[] maxKey) {
return SQLKVTransaction.this.getAtLeastSQL(minKey, maxKey);
}
@Override
public KVPair getAtMost(byte[] maxKey, byte[] minKey) {
return SQLKVTransaction.this.getAtMostSQL(maxKey, minKey);
}
@Override
public CloseableIterator<KVPair> getRange(byte[] minKey, byte[] maxKey, boolean reverse) {
return SQLKVTransaction.this.getRangeSQL(minKey, maxKey, reverse);
}
@Override
public void put(byte[] key, byte[] value) {
SQLKVTransaction.this.putSQL(key, value);
}
@Override
public void remove(byte[] key) {
SQLKVTransaction.this.removeSQL(key);
}
@Override
public void removeRange(byte[] minKey, byte[] maxKey) {
SQLKVTransaction.this.removeRangeSQL(minKey, maxKey);
}
}
// ResultSetFunction
private interface ResultSetFunction<T> {
T apply(PreparedStatement preparedStatement, ResultSet resultSet) throws SQLException;
}
// ResultSetIterator
private class ResultSetIterator implements CloseableIterator<KVPair> {
private final PreparedStatement preparedStatement;
private final ResultSet resultSet;
private boolean ready;
private boolean closed;
private byte[] removeKey;
ResultSetIterator(PreparedStatement preparedStatement, ResultSet resultSet) {
assert preparedStatement != null;
assert resultSet != null;
this.resultSet = resultSet;
this.preparedStatement = preparedStatement;
}
// Iterator
@Override
public synchronized boolean hasNext() {
if (this.closed)
return false;
if (this.ready)
return true;
try {
this.ready = this.resultSet.next();
} catch (SQLException e) {
throw SQLKVTransaction.this.handleException(e);
}
if (!this.ready)
this.close();
return this.ready;
}
@Override
public synchronized KVPair next() {
if (!this.hasNext())
throw new NoSuchElementException();
final byte[] key;
final byte[] value;
try {
key = this.resultSet.getBytes(1);
value = this.resultSet.getBytes(2);
} catch (SQLException e) {
throw SQLKVTransaction.this.handleException(e);
}
this.removeKey = key.clone();
this.ready = false;
return new KVPair(key, value);
}
@Override
public synchronized void remove() {
if (this.closed || this.removeKey == null)
throw new IllegalStateException();
SQLKVTransaction.this.remove(this.removeKey);
this.removeKey = null;
}
// Closeable
@Override
public synchronized void close() {
if (this.closed)
return;
this.closed = true;
try {
this.resultSet.close();
} catch (Exception e) {
// ignore
}
try {
this.preparedStatement.close();
} catch (Exception e) {
// ignore
}
}
// Object
@Override
protected void finalize() throws Throwable {
try {
this.close();
} finally {
super.finalize();
}
}
}
// StmtType
/**
* Used internally to build SQL statements.
*/
protected enum StmtType {
GET {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetStatement(), log);
};
},
GET_FIRST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAllStatement(false)), log);
};
},
GET_LAST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAllStatement(true)), log);
};
},
GET_AT_LEAST_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtLeastStatement(false), log);
};
},
GET_AT_LEAST_FORWARD_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtLeastStatement(false)), log);
};
},
GET_AT_LEAST_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtLeastStatement(true), log);
};
},
GET_AT_LEAST_REVERSE_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtLeastStatement(true)), log);
};
},
GET_AT_MOST_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtMostStatement(false), log);
};
},
GET_AT_MOST_FORWARD_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtMostStatement(false)), log);
};
},
GET_AT_MOST_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtMostStatement(true), log);
};
},
GET_AT_MOST_REVERSE_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtMostStatement(true)), log);
};
},
GET_RANGE_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetRangeStatement(false), log);
};
},
GET_RANGE_FORWARD_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetRangeStatement(false)), log);
};
},
GET_RANGE_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetRangeStatement(true), log);
};
},
GET_RANGE_REVERSE_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetRangeStatement(true)), log);
};
},
GET_ALL_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAllStatement(false), log);
};
},
GET_ALL_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAllStatement(true), log);
};
},
PUT {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createPutStatement(), log);
};
},
REMOVE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveStatement(), log);
};
},
REMOVE_RANGE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveRangeStatement(), log);
};
},
REMOVE_AT_LEAST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveAtLeastStatement(), log);
};
},
REMOVE_AT_MOST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveAtMostStatement(), log);
};
},
REMOVE_ALL {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveAllStatement(), log);
};
};
protected abstract PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException;
protected PreparedStatement prepare(Connection c, String sql, Logger log) throws SQLException {
if (log.isTraceEnabled())
log.trace("preparing SQL statement: " + sql);
return c.prepareStatement(sql);
}
}
}
|
jsimpledb-kv-sql/src/main/java/org/jsimpledb/kv/sql/SQLKVTransaction.java
|
/*
* Copyright (C) 2015 Archie L. Cobbs. All rights reserved.
*/
package org.jsimpledb.kv.sql;
import com.google.common.base.Preconditions;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.NoSuchElementException;
import java.util.concurrent.Future;
import org.jsimpledb.kv.AbstractKVStore;
import org.jsimpledb.kv.CloseableKVStore;
import org.jsimpledb.kv.KVPair;
import org.jsimpledb.kv.KVStore;
import org.jsimpledb.kv.KVTransaction;
import org.jsimpledb.kv.KVTransactionException;
import org.jsimpledb.kv.StaleTransactionException;
import org.jsimpledb.kv.mvcc.MutableView;
import org.jsimpledb.kv.util.ForwardingKVStore;
import org.jsimpledb.util.ByteUtil;
import org.jsimpledb.util.CloseableIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@link SQLKVDatabase} transaction.
*/
public class SQLKVTransaction extends ForwardingKVStore implements KVTransaction {
protected final Logger log = LoggerFactory.getLogger(this.getClass());
protected final SQLKVDatabase database;
protected final Connection connection;
private long timeout;
private boolean readOnly;
private KVStore view;
private boolean closed;
private boolean stale;
/**
* Constructor.
*
* @param database the associated database
* @param connection the {@link Connection} for the transaction
* @throws SQLException if an SQL error occurs
*/
public SQLKVTransaction(SQLKVDatabase database, Connection connection) throws SQLException {
Preconditions.checkArgument(database != null, "null database");
Preconditions.checkArgument(connection != null, "null connection");
this.database = database;
this.connection = connection;
}
@Override
public SQLKVDatabase getKVDatabase() {
return this.database;
}
@Override
public void setTimeout(long timeout) {
Preconditions.checkArgument(timeout >= 0, "timeout < 0");
this.timeout = timeout;
}
/**
* Watch a key to monitor for changes in its value.
*
* <p>
* The implementation in {@link SQLKVTransaction} always throws {@link UnsupportedOperationException}.
* Subclasses may add support using a database-specific notification mechanism.
*
* @param key {@inheritDoc}
* @return {@inheritDoc}
* @throws StaleTransactionException {@inheritDoc}
* @throws org.jsimpledb.kv.RetryTransactionException {@inheritDoc}
* @throws org.jsimpledb.kv.KVDatabaseException {@inheritDoc}
* @throws UnsupportedOperationException {@inheritDoc}
* @throws IllegalArgumentException {@inheritDoc}
*/
@Override
public Future<Void> watchKey(byte[] key) {
throw new UnsupportedOperationException();
}
private synchronized byte[] getSQL(byte[] key) {
if (this.stale)
throw new StaleTransactionException(this);
Preconditions.checkArgument(key != null, "null key");
return this.queryBytes(StmtType.GET, key);
}
private synchronized KVPair getAtLeastSQL(byte[] minKey, byte[] maxKey) {
if (this.stale)
throw new StaleTransactionException(this);
return minKey != null && minKey.length > 0 ?
(maxKey != null ?
this.queryKVPair(StmtType.GET_RANGE_FORWARD_SINGLE, minKey, maxKey) :
this.queryKVPair(StmtType.GET_AT_LEAST_FORWARD_SINGLE, minKey)) :
(maxKey != null ?
this.queryKVPair(StmtType.GET_AT_MOST_FORWARD_SINGLE, maxKey) :
this.queryKVPair(StmtType.GET_FIRST));
}
private synchronized KVPair getAtMostSQL(byte[] maxKey, byte[] minKey) {
if (this.stale)
throw new StaleTransactionException(this);
return maxKey != null ?
(minKey != null && minKey.length > 0 ?
this.queryKVPair(StmtType.GET_RANGE_REVERSE_SINGLE, minKey, maxKey) :
this.queryKVPair(StmtType.GET_AT_MOST_REVERSE_SINGLE, maxKey)) :
(minKey != null && minKey.length > 0 ?
this.queryKVPair(StmtType.GET_AT_LEAST_REVERSE_SINGLE, minKey) :
this.queryKVPair(StmtType.GET_LAST));
}
private synchronized CloseableIterator<KVPair> getRangeSQL(byte[] minKey, byte[] maxKey, boolean reverse) {
if (this.stale)
throw new StaleTransactionException(this);
if (minKey != null && minKey.length == 0)
minKey = null;
if (minKey == null && maxKey == null)
return this.queryIterator(reverse ? StmtType.GET_ALL_REVERSE : StmtType.GET_ALL_FORWARD);
if (minKey == null)
return this.queryIterator(reverse ? StmtType.GET_AT_MOST_REVERSE : StmtType.GET_AT_MOST_FORWARD, maxKey);
if (maxKey == null)
return this.queryIterator(reverse ? StmtType.GET_AT_LEAST_REVERSE : StmtType.GET_AT_LEAST_FORWARD, minKey);
else
return this.queryIterator(reverse ? StmtType.GET_RANGE_REVERSE : StmtType.GET_RANGE_FORWARD, minKey, maxKey);
}
private synchronized void putSQL(byte[] key, byte[] value) {
Preconditions.checkArgument(key != null, "null key");
Preconditions.checkArgument(value != null, "null value");
if (this.stale)
throw new StaleTransactionException(this);
this.update(StmtType.PUT, key, value, value);
}
private synchronized void removeSQL(byte[] key) {
Preconditions.checkArgument(key != null, "null key");
if (this.stale)
throw new StaleTransactionException(this);
this.update(StmtType.REMOVE, key);
}
private synchronized void removeRangeSQL(byte[] minKey, byte[] maxKey) {
if (this.stale)
throw new StaleTransactionException(this);
if (minKey != null && minKey.length == 0)
minKey = null;
if (minKey == null && maxKey == null)
this.update(StmtType.REMOVE_ALL);
else if (minKey == null)
this.update(StmtType.REMOVE_AT_MOST, maxKey);
else if (maxKey == null)
this.update(StmtType.REMOVE_AT_LEAST, minKey);
else
this.update(StmtType.REMOVE_RANGE, minKey, maxKey);
}
@Override
public synchronized boolean isReadOnly() {
return this.readOnly;
}
@Override
public synchronized void setReadOnly(boolean readOnly) {
Preconditions.checkState(this.view == null || readOnly == this.readOnly, "data already accessed");
this.readOnly = readOnly;
}
@Override
protected synchronized KVStore delegate() {
if (this.view == null) {
this.view = new SQLView();
if (this.readOnly && !this.database.rollbackForReadOnly)
this.view = new MutableView(this.view);
}
return this.view;
}
@Override
public synchronized void commit() {
if (this.stale)
throw new StaleTransactionException(this);
this.stale = true;
try {
if (this.readOnly && !(this.view instanceof MutableView))
this.connection.rollback();
else
this.connection.commit();
} catch (SQLException e) {
throw this.handleException(e);
} finally {
this.closeConnection();
}
}
@Override
public synchronized void rollback() {
if (this.stale)
return;
this.stale = true;
try {
this.connection.rollback();
} catch (SQLException e) {
throw this.handleException(e);
} finally {
this.closeConnection();
}
}
@Override
public CloseableKVStore mutableSnapshot() {
throw new UnsupportedOperationException();
}
/**
* Handle an unexpected SQL exception.
*
* <p>
* The implementation in {@link SQLKVTransaction} rolls back the SQL transaction, closes the associated {@link Connection},
* and wraps the exception via {@link SQLKVDatabase#wrapException SQLKVDatabase.wrapException()}.
*
* @param e original exception
* @return key/value transaction exception
*/
protected KVTransactionException handleException(SQLException e) {
this.stale = true;
try {
this.connection.rollback();
} catch (SQLException e2) {
// ignore
} finally {
this.closeConnection();
}
return this.database.wrapException(this, e);
}
/**
* Close the {@link Connection} associated with this instance, if it's not already closed.
* This method is idempotent.
*/
protected void closeConnection() {
if (this.closed)
return;
this.closed = true;
try {
this.connection.close();
} catch (SQLException e) {
// ignore
}
}
@Override
protected void finalize() throws Throwable {
try {
if (!this.stale)
this.log.warn(this + " leaked without commit() or rollback()");
this.closeConnection();
} finally {
super.finalize();
}
}
// Helper methods
private byte[] queryBytes(StmtType stmtType, byte[]... params) {
return this.query(stmtType, (stmt, rs) -> rs.next() ? rs.getBytes(1) : null, true, params);
}
private KVPair queryKVPair(StmtType stmtType, byte[]... params) {
return this.query(stmtType, (stmt, rs) -> rs.next() ? new KVPair(rs.getBytes(1), rs.getBytes(2)) : null, true, params);
}
private CloseableIterator<KVPair> queryIterator(StmtType stmtType, byte[]... params) {
return this.query(stmtType, ResultSetIterator::new, false, params);
}
private <T> T query(StmtType stmtType, ResultSetFunction<T> resultSetFunction, boolean close, byte[]... params) {
try {
final PreparedStatement preparedStatement = stmtType.create(this.database, this.connection, this.log);
final int numParams = preparedStatement.getParameterMetaData().getParameterCount();
for (int i = 0; i < params.length && i < numParams; i++) {
if (this.log.isTraceEnabled())
this.log.trace("setting ?" + (i + 1) + " = " + ByteUtil.toString(params[i]));
preparedStatement.setBytes(i + 1, params[i]);
}
preparedStatement.setQueryTimeout((int)((this.timeout + 999) / 1000));
if (this.log.isTraceEnabled())
this.log.trace("executing SQL query: " + preparedStatement + " in " + this);
final ResultSet resultSet = preparedStatement.executeQuery();
final T result = resultSetFunction.apply(preparedStatement, resultSet);
if (close) {
resultSet.close();
preparedStatement.close();
}
return result;
} catch (SQLException e) {
throw this.handleException(e);
}
}
private void update(StmtType stmtType, byte[]... params) {
try (final PreparedStatement preparedStatement = stmtType.create(this.database, this.connection, this.log)) {
final int numParams = preparedStatement.getParameterMetaData().getParameterCount();
for (int i = 0; i < params.length && i < numParams; i++) {
if (this.log.isTraceEnabled())
this.log.trace("setting ?" + (i + 1) + " = " + ByteUtil.toString(params[i]));
preparedStatement.setBytes(i + 1, params[i]);
}
preparedStatement.setQueryTimeout((int)((this.timeout + 999) / 1000));
if (this.log.isTraceEnabled())
this.log.trace("executing SQL update: " + preparedStatement + " in " + this);
preparedStatement.executeUpdate();
} catch (SQLException e) {
throw this.handleException(e);
}
}
// SQLView
private class SQLView extends AbstractKVStore {
@Override
public byte[] get(byte[] key) {
return SQLKVTransaction.this.getSQL(key);
}
@Override
public KVPair getAtLeast(byte[] minKey, byte[] maxKey) {
return SQLKVTransaction.this.getAtLeastSQL(minKey, maxKey);
}
@Override
public KVPair getAtMost(byte[] maxKey, byte[] minKey) {
return SQLKVTransaction.this.getAtMostSQL(maxKey, minKey);
}
@Override
public CloseableIterator<KVPair> getRange(byte[] minKey, byte[] maxKey, boolean reverse) {
return SQLKVTransaction.this.getRangeSQL(minKey, maxKey, reverse);
}
@Override
public void put(byte[] key, byte[] value) {
SQLKVTransaction.this.putSQL(key, value);
}
@Override
public void remove(byte[] key) {
SQLKVTransaction.this.removeSQL(key);
}
@Override
public void removeRange(byte[] minKey, byte[] maxKey) {
SQLKVTransaction.this.removeRangeSQL(minKey, maxKey);
}
}
// ResultSetFunction
private interface ResultSetFunction<T> {
T apply(PreparedStatement preparedStatement, ResultSet resultSet) throws SQLException;
}
// ResultSetIterator
private class ResultSetIterator implements CloseableIterator<KVPair> {
private final PreparedStatement preparedStatement;
private final ResultSet resultSet;
private boolean ready;
private boolean closed;
private byte[] removeKey;
ResultSetIterator(PreparedStatement preparedStatement, ResultSet resultSet) {
assert preparedStatement != null;
assert resultSet != null;
this.resultSet = resultSet;
this.preparedStatement = preparedStatement;
}
// Iterator
@Override
public synchronized boolean hasNext() {
if (this.closed)
return false;
if (this.ready)
return true;
try {
this.ready = this.resultSet.next();
} catch (SQLException e) {
throw SQLKVTransaction.this.handleException(e);
}
if (!this.ready)
this.close();
return this.ready;
}
@Override
public synchronized KVPair next() {
if (!this.hasNext())
throw new NoSuchElementException();
final byte[] key;
final byte[] value;
try {
key = this.resultSet.getBytes(1);
value = this.resultSet.getBytes(2);
} catch (SQLException e) {
throw SQLKVTransaction.this.handleException(e);
}
this.removeKey = key.clone();
this.ready = false;
return new KVPair(key, value);
}
@Override
public synchronized void remove() {
if (this.closed || this.removeKey == null)
throw new IllegalStateException();
SQLKVTransaction.this.remove(this.removeKey);
this.removeKey = null;
}
// Closeable
@Override
public synchronized void close() {
if (this.closed)
return;
this.closed = true;
try {
this.resultSet.close();
} catch (Exception e) {
// ignore
}
try {
this.preparedStatement.close();
} catch (Exception e) {
// ignore
}
}
// Object
@Override
protected void finalize() throws Throwable {
try {
this.close();
} finally {
super.finalize();
}
}
}
// StmtType
/**
* Used internally to build SQL statements.
*/
enum StmtType {
GET {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetStatement(), log);
};
},
GET_FIRST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAllStatement(false)), log);
};
},
GET_LAST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAllStatement(true)), log);
};
},
GET_AT_LEAST_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtLeastStatement(false), log);
};
},
GET_AT_LEAST_FORWARD_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtLeastStatement(false)), log);
};
},
GET_AT_LEAST_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtLeastStatement(true), log);
};
},
GET_AT_LEAST_REVERSE_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtLeastStatement(true)), log);
};
},
GET_AT_MOST_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtMostStatement(false), log);
};
},
GET_AT_MOST_FORWARD_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtMostStatement(false)), log);
};
},
GET_AT_MOST_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAtMostStatement(true), log);
};
},
GET_AT_MOST_REVERSE_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetAtMostStatement(true)), log);
};
},
GET_RANGE_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetRangeStatement(false), log);
};
},
GET_RANGE_FORWARD_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetRangeStatement(false)), log);
};
},
GET_RANGE_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetRangeStatement(true), log);
};
},
GET_RANGE_REVERSE_SINGLE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.limitSingleRow(db.createGetRangeStatement(true)), log);
};
},
GET_ALL_FORWARD {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAllStatement(false), log);
};
},
GET_ALL_REVERSE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createGetAllStatement(true), log);
};
},
PUT {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createPutStatement(), log);
};
},
REMOVE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveStatement(), log);
};
},
REMOVE_RANGE {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveRangeStatement(), log);
};
},
REMOVE_AT_LEAST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveAtLeastStatement(), log);
};
},
REMOVE_AT_MOST {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveAtMostStatement(), log);
};
},
REMOVE_ALL {
@Override
protected PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException {
return this.prepare(c, db.createRemoveAllStatement(), log);
};
};
protected abstract PreparedStatement create(SQLKVDatabase db, Connection c, Logger log) throws SQLException;
protected PreparedStatement prepare(Connection c, String sql, Logger log) throws SQLException {
if (log.isTraceEnabled())
log.trace("preparing SQL statement: " + sql);
return c.prepareStatement(sql);
}
}
}
|
Make some methods protected to give subclasses more flexibility.
|
jsimpledb-kv-sql/src/main/java/org/jsimpledb/kv/sql/SQLKVTransaction.java
|
Make some methods protected to give subclasses more flexibility.
|
|
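The change above makes queryBytes(), queryKVPair(), queryIterator(), query(), update(), and the StmtType enum protected instead of private or package-private. Below is a minimal subclass sketch of the flexibility that enables, assuming only the protected signatures shown in the diff; TimingSQLKVTransaction is an invented name and is not part of jsimpledb.
import java.sql.Connection;
import java.sql.SQLException;
import org.jsimpledb.kv.sql.SQLKVDatabase;
import org.jsimpledb.kv.sql.SQLKVTransaction;
// Hypothetical subclass that wraps every SQL update with a wall-clock measurement.
public class TimingSQLKVTransaction extends SQLKVTransaction {
    public TimingSQLKVTransaction(SQLKVDatabase database, Connection connection) throws SQLException {
        super(database, connection);
    }
    @Override
    protected void update(StmtType stmtType, byte[]... params) {
        final long start = System.nanoTime();
        try {
            super.update(stmtType, params);
        } finally {
            // this.log is the protected slf4j logger inherited from SQLKVTransaction
            this.log.debug("update " + stmtType + " took "
              + ((System.nanoTime() - start) / 1000000L) + "ms");
        }
    }
}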
Java
|
apache-2.0
|
cf9bf80797cc938a404fcfc8cb78753d5d7bc081
| 0
|
palantir/atlasdb,palantir/atlasdb,palantir/atlasdb,EvilMcJerkface/atlasdb,EvilMcJerkface/atlasdb,EvilMcJerkface/atlasdb
|
/*
* Copyright 2017 Palantir Technologies, Inc. All rights reserved.
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.processors;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.SOURCE)
public @interface AutoDelegate {
/**
* The type to be extended. Can be either a class or an interface.
*/
Class typeToExtend();
}
|
atlasdb-processors/src/main/java/com/palantir/processors/AutoDelegate.java
|
/*
* Copyright 2017 Palantir Technologies, Inc. All rights reserved.
*
* Licensed under the BSD-3 License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.processors;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.CLASS)
public @interface AutoDelegate {
/**
* The type to be extended. Can be either a class or an interface.
*/
Class typeToExtend();
}
|
Mark AutoDelegate as source-only (#2594)
* Mark annotations as source-only
* [no release notes]
|
atlasdb-processors/src/main/java/com/palantir/processors/AutoDelegate.java
|
Mark AutoDelegate as source-only (#2594)
|
|
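The commit above only changes the annotation's retention from CLASS to SOURCE, reflecting that @AutoDelegate exists solely to drive a compile-time annotation processor. The sketch below is a hedged usage illustration; KeyValueStore and RetentionDemo are invented example types, not atlasdb code, and the snippet only demonstrates what SOURCE retention means for reflection.
import com.palantir.processors.AutoDelegate;
// Hypothetical usage: typeToExtend names the type to be extended -- here the
// annotated interface itself (pattern assumed for illustration).
@AutoDelegate(typeToExtend = KeyValueStore.class)
interface KeyValueStore {
    byte[] get(byte[] key);
    void put(byte[] key, byte[] value);
}
class RetentionDemo {
    public static void main(String[] args) {
        // With RetentionPolicy.SOURCE the annotation is visible to the processor
        // during compilation but is never written into the .class file, so
        // reflection sees nothing here and this prints "null".
        System.out.println(KeyValueStore.class.getAnnotation(AutoDelegate.class));
    }
}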
Java
|
apache-2.0
|
a56057f97620ae7b958c338149853d605d8d3604
| 0
|
Overruler/gerrit,anminhsu/gerrit,gerrit-review/gerrit,renchaorevee/gerrit,Overruler/gerrit,thesamet/gerrit,netroby/gerrit,Seinlin/gerrit,thesamet/gerrit,dwhipstock/gerrit,Distrotech/gerrit,netroby/gerrit,anminhsu/gerrit,Distrotech/gerrit,pkdevbox/gerrit,Overruler/gerrit,hdost/gerrit,renchaorevee/gerrit,dwhipstock/gerrit,GerritCodeReview/gerrit,WANdisco/gerrit,WANdisco/gerrit,gcoders/gerrit,Seinlin/gerrit,jackminicloud/test,TonyChai24/test,thesamet/gerrit,thinkernel/gerrit,joshuawilson/merrit,GerritCodeReview/gerrit,Distrotech/gerrit,gcoders/gerrit,renchaorevee/gerrit,TonyChai24/test,TonyChai24/test,thesamet/gerrit,quyixia/gerrit,dwhipstock/gerrit,gcoders/gerrit,Seinlin/gerrit,jackminicloud/test,dwhipstock/gerrit,gcoders/gerrit,Seinlin/gerrit,renchaorevee/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,qtproject/qtqa-gerrit,quyixia/gerrit,hdost/gerrit,quyixia/gerrit,jackminicloud/test,supriyantomaftuh/gerrit,WANdisco/gerrit,pkdevbox/gerrit,gerrit-review/gerrit,hdost/gerrit,hdost/gerrit,TonyChai24/test,Overruler/gerrit,Overruler/gerrit,supriyantomaftuh/gerrit,thinkernel/gerrit,joshuawilson/merrit,gerrit-review/gerrit,MerritCR/merrit,dwhipstock/gerrit,TonyChai24/test,GerritCodeReview/gerrit,supriyantomaftuh/gerrit,renchaorevee/gerrit,quyixia/gerrit,gcoders/gerrit,Distrotech/gerrit,anminhsu/gerrit,netroby/gerrit,jackminicloud/test,quyixia/gerrit,netroby/gerrit,thinkernel/gerrit,GerritCodeReview/gerrit,netroby/gerrit,thesamet/gerrit,hdost/gerrit,hdost/gerrit,MerritCR/merrit,joshuawilson/merrit,Distrotech/gerrit,MerritCR/merrit,thinkernel/gerrit,jackminicloud/test,Seinlin/gerrit,qtproject/qtqa-gerrit,netroby/gerrit,MerritCR/merrit,quyixia/gerrit,pkdevbox/gerrit,qtproject/qtqa-gerrit,MerritCR/merrit,pkdevbox/gerrit,qtproject/qtqa-gerrit,supriyantomaftuh/gerrit,pkdevbox/gerrit,gerrit-review/gerrit,jackminicloud/test,pkdevbox/gerrit,dwhipstock/gerrit,thinkernel/gerrit,dwhipstock/gerrit,renchaorevee/gerrit,supriyantomaftuh/gerrit,joshuawilson/merrit,gerrit-review/gerrit,MerritCR/merrit,gerrit-review/gerrit,Overruler/gerrit,joshuawilson/merrit,MerritCR/merrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,joshuawilson/merrit,joshuawilson/merrit,Seinlin/gerrit,qtproject/qtqa-gerrit,WANdisco/gerrit,joshuawilson/merrit,thesamet/gerrit,GerritCodeReview/gerrit,netroby/gerrit,qtproject/qtqa-gerrit,Distrotech/gerrit,quyixia/gerrit,anminhsu/gerrit,pkdevbox/gerrit,anminhsu/gerrit,TonyChai24/test,Seinlin/gerrit,gcoders/gerrit,thinkernel/gerrit,gcoders/gerrit,TonyChai24/test,supriyantomaftuh/gerrit,hdost/gerrit,jackminicloud/test,thesamet/gerrit,GerritCodeReview/gerrit,Distrotech/gerrit,MerritCR/merrit,anminhsu/gerrit,renchaorevee/gerrit,thinkernel/gerrit,WANdisco/gerrit,WANdisco/gerrit,supriyantomaftuh/gerrit,gerrit-review/gerrit,anminhsu/gerrit
|
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.sshd;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.gerrit.audit.AuditService;
import com.google.gerrit.audit.SshAuditEvent;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PeerDaemonUser;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.util.IdGenerator;
import com.google.gerrit.server.util.SystemLog;
import com.google.gerrit.sshd.SshScope.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import org.apache.log4j.AsyncAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.eclipse.jgit.lib.Config;
@Singleton
class SshLog implements LifecycleListener {
private static final Logger log = Logger.getLogger(SshLog.class);
private static final String LOG_NAME = "sshd_log";
private static final String P_SESSION = "session";
private static final String P_USER_NAME = "userName";
private static final String P_ACCOUNT_ID = "accountId";
private static final String P_WAIT = "queueWaitTime";
private static final String P_EXEC = "executionTime";
private static final String P_STATUS = "status";
private final Provider<SshSession> session;
private final Provider<Context> context;
private final AsyncAppender async;
private final AuditService auditService;
@Inject
SshLog(final Provider<SshSession> session, final Provider<Context> context,
SystemLog systemLog, @GerritServerConfig Config config,
AuditService auditService) {
this.session = session;
this.context = context;
this.auditService = auditService;
if (!config.getBoolean("sshd", "requestLog", true)) {
async = null;
return;
}
async = systemLog.createAsyncAppender(LOG_NAME, new SshLogLayout());
}
@Override
public void start() {
}
@Override
public void stop() {
if (async != null) {
async.close();
}
}
void onLogin() {
LoggingEvent entry =
log("LOGIN FROM " + session.get().getRemoteAddressAsString());
if (async != null) {
async.append(entry);
}
audit(context.get(), "0", "LOGIN");
}
void onAuthFail(final SshSession sd) {
final LoggingEvent event = new LoggingEvent( //
Logger.class.getName(), // fqnOfCategoryClass
log, // logger
TimeUtil.nowMs(), // when
Level.INFO, // level
"AUTH FAILURE FROM " + sd.getRemoteAddressAsString(), // message text
"SSHD", // thread name
null, // exception information
null, // current NDC string
null, // caller location
null // MDC properties
);
event.setProperty(P_SESSION, id(sd.getSessionId()));
event.setProperty(P_USER_NAME, sd.getUsername());
final String error = sd.getAuthenticationError();
if (error != null) {
event.setProperty(P_STATUS, error);
}
if (async != null) {
async.append(event);
}
audit(null, "FAIL", "AUTH");
}
void onExecute(DispatchCommand dcmd, int exitValue) {
final Context ctx = context.get();
ctx.finished = TimeUtil.nowMs();
String cmd = extractWhat(dcmd);
final LoggingEvent event = log(cmd);
event.setProperty(P_WAIT, (ctx.started - ctx.created) + "ms");
event.setProperty(P_EXEC, (ctx.finished - ctx.started) + "ms");
final String status;
switch (exitValue) {
case BaseCommand.STATUS_CANCEL:
status = "killed";
break;
case BaseCommand.STATUS_NOT_FOUND:
status = "not-found";
break;
case BaseCommand.STATUS_NOT_ADMIN:
status = "not-admin";
break;
default:
status = String.valueOf(exitValue);
break;
}
event.setProperty(P_STATUS, status);
if (async != null) {
async.append(event);
}
audit(context.get(), status, dcmd);
}
private Multimap<String, ?> extractParameters(DispatchCommand dcmd) {
String[] cmdArgs = dcmd.getArguments();
String paramName = null;
int argPos = 0;
Multimap<String, String> parms = ArrayListMultimap.create();
for (int i = 2; i < cmdArgs.length; i++) {
String arg = cmdArgs[i];
// -- stop parameters parsing
if (arg.equals("--")) {
for (i++; i < cmdArgs.length; i++) {
parms.put("$" + argPos++, cmdArgs[i]);
}
break;
}
// --param=value
int eqPos = arg.indexOf('=');
if (arg.startsWith("--") && eqPos > 0) {
parms.put(arg.substring(0, eqPos), arg.substring(eqPos + 1));
continue;
}
// -p value or --param value
if (arg.startsWith("-")) {
if (paramName != null) {
parms.put(paramName, null);
}
paramName = arg;
continue;
}
// value
if (paramName == null) {
parms.put("$" + argPos++, arg);
} else {
parms.put(paramName, arg);
paramName = null;
}
}
if (paramName != null) {
parms.put(paramName, null);
}
return parms;
}
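// Illustrative example only (arguments invented): for
//   {"gerrit", "review", "--project=foo", "-m", "done", "--", "12345,1"}
// the loop above skips the first two entries and yields
//   {--project=[foo], -m=[done], $0=[12345,1]}
// i.e. "--name=value" and "-flag value" pairs keep their names, while
// everything after "--" is recorded as positional $0, $1, ...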
void onLogout() {
LoggingEvent entry = log("LOGOUT");
if (async != null) {
async.append(entry);
}
audit(context.get(), "0", "LOGOUT");
}
private LoggingEvent log(final String msg) {
final SshSession sd = session.get();
final CurrentUser user = sd.getCurrentUser();
final LoggingEvent event = new LoggingEvent( //
Logger.class.getName(), // fqnOfCategoryClass
log, // logger
TimeUtil.nowMs(), // when
Level.INFO, // level
msg, // message text
"SSHD", // thread name
null, // exception information
null, // current NDC string
null, // caller location
null // MDC properties
);
event.setProperty(P_SESSION, id(sd.getSessionId()));
String userName = "-", accountId = "-";
if (user != null && user.isIdentifiedUser()) {
IdentifiedUser u = (IdentifiedUser) user;
userName = u.getAccount().getUserName();
accountId = "a/" + u.getAccountId().toString();
} else if (user instanceof PeerDaemonUser) {
userName = PeerDaemonUser.USER_NAME;
}
event.setProperty(P_USER_NAME, userName);
event.setProperty(P_ACCOUNT_ID, accountId);
return event;
}
private static String id(final int id) {
return IdGenerator.format(id);
}
void audit(Context ctx, Object result, String cmd) {
audit(ctx, result, cmd, null);
}
void audit(Context ctx, Object result, DispatchCommand cmd) {
audit(ctx, result, extractWhat(cmd), extractParameters(cmd));
}
private void audit(Context ctx, Object result, String cmd, Multimap<String, ?> params) {
String sessionId;
CurrentUser currentUser;
long created;
if (ctx == null) {
sessionId = null;
currentUser = null;
created = TimeUtil.nowMs();
} else {
SshSession session = ctx.getSession();
sessionId = IdGenerator.format(session.getSessionId());
currentUser = session.getCurrentUser();
created = ctx.created;
}
auditService.dispatch(new SshAuditEvent(sessionId, currentUser,
cmd, created, params, result));
}
private String extractWhat(DispatchCommand dcmd) {
String commandName = dcmd.getCommandName();
for (String arg : dcmd.getArguments()) {
commandName = commandName + "." + arg;
}
return commandName;
}
}
|
gerrit-sshd/src/main/java/com/google/gerrit/sshd/SshLog.java
|
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.sshd;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.gerrit.audit.AuditService;
import com.google.gerrit.audit.SshAuditEvent;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PeerDaemonUser;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.util.IdGenerator;
import com.google.gerrit.server.util.SystemLog;
import com.google.gerrit.sshd.SshScope.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import org.apache.log4j.AsyncAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.eclipse.jgit.lib.Config;
@Singleton
class SshLog implements LifecycleListener {
private static final Logger log = Logger.getLogger(SshLog.class);
private static final String LOG_NAME = "sshd_log";
private static final String P_SESSION = "session";
private static final String P_USER_NAME = "userName";
private static final String P_ACCOUNT_ID = "accountId";
private static final String P_WAIT = "queueWaitTime";
private static final String P_EXEC = "executionTime";
private static final String P_STATUS = "status";
private final Provider<SshSession> session;
private final Provider<Context> context;
private final AsyncAppender async;
private final AuditService auditService;
@Inject
SshLog(final Provider<SshSession> session, final Provider<Context> context,
SystemLog systemLog, @GerritServerConfig Config config,
AuditService auditService) {
this.session = session;
this.context = context;
this.auditService = auditService;
if (!config.getBoolean("sshd", "requestLog", true)) {
async = null;
return;
}
async = systemLog.createAsyncAppender(LOG_NAME, new SshLogLayout());
}
@Override
public void start() {
}
@Override
public void stop() {
if (async != null) {
async.close();
}
}
void onLogin() {
LoggingEvent entry =
log("LOGIN FROM " + session.get().getRemoteAddressAsString());
if (async != null) {
async.append(entry);
}
audit(context.get(), "0", "LOGIN");
}
void onAuthFail(final SshSession sd) {
final LoggingEvent event = new LoggingEvent( //
Logger.class.getName(), // fqnOfCategoryClass
log, // logger
TimeUtil.nowMs(), // when
Level.INFO, // level
"AUTH FAILURE FROM " + sd.getRemoteAddressAsString(), // message text
"SSHD", // thread name
null, // exception information
null, // current NDC string
null, // caller location
null // MDC properties
);
event.setProperty(P_SESSION, id(sd.getSessionId()));
event.setProperty(P_USER_NAME, sd.getUsername());
final String error = sd.getAuthenticationError();
if (error != null) {
event.setProperty(P_STATUS, error);
}
if (async != null) {
async.append(event);
}
audit(null, "FAIL", "AUTH");
}
void onExecute(DispatchCommand dcmd, int exitValue) {
final Context ctx = context.get();
ctx.finished = TimeUtil.nowMs();
String cmd = extractWhat(dcmd);
final LoggingEvent event = log(cmd);
event.setProperty(P_WAIT, (ctx.started - ctx.created) + "ms");
event.setProperty(P_EXEC, (ctx.finished - ctx.started) + "ms");
final String status;
switch (exitValue) {
case BaseCommand.STATUS_CANCEL:
status = "killed";
break;
case BaseCommand.STATUS_NOT_FOUND:
status = "not-found";
break;
case BaseCommand.STATUS_NOT_ADMIN:
status = "not-admin";
break;
default:
status = String.valueOf(exitValue);
break;
}
event.setProperty(P_STATUS, status);
if (async != null) {
async.append(event);
}
audit(context.get(), status, dcmd);
}
private Multimap<String, ?> extractParameters(DispatchCommand dcmd) {
String[] cmdArgs = dcmd.getArguments();
String paramName = null;
int argPos = 0;
Multimap<String, String> parms = ArrayListMultimap.create();
for (int i = 2; i < cmdArgs.length; i++) {
String arg = cmdArgs[i];
// -- stop parameters parsing
if (arg.equals("--")) {
for (i++; i < cmdArgs.length; i++) {
parms.put("$" + argPos++, cmdArgs[i]);
}
break;
}
// --param=value
int eqPos = arg.indexOf('=');
if (arg.startsWith("--") && eqPos > 0) {
parms.put(arg.substring(0, eqPos), arg.substring(eqPos + 1));
continue;
}
// -p value or --param value
if (arg.startsWith("-")) {
if (paramName != null) {
parms.put(paramName, null);
}
paramName = arg;
continue;
}
// value
if (paramName == null) {
parms.put("$" + argPos++, arg);
} else {
parms.put(paramName, arg);
paramName = null;
}
}
if (paramName != null) {
parms.put(paramName, null);
}
return parms;
}
void onLogout() {
LoggingEvent entry = log("LOGOUT");
if (async != null) {
async.append(entry);
}
audit(context.get(), "0", "LOGOUT");
}
private LoggingEvent log(final String msg) {
final SshSession sd = session.get();
final CurrentUser user = sd.getCurrentUser();
final LoggingEvent event = new LoggingEvent( //
Logger.class.getName(), // fqnOfCategoryClass
log, // logger
TimeUtil.nowMs(), // when
Level.INFO, // level
msg, // message text
"SSHD", // thread name
null, // exception information
null, // current NDC string
null, // caller location
null // MDC properties
);
event.setProperty(P_SESSION, id(sd.getSessionId()));
String userName = "-", accountId = "-";
if (user != null && user.isIdentifiedUser()) {
IdentifiedUser u = (IdentifiedUser) user;
userName = u.getAccount().getUserName();
accountId = "a/" + u.getAccountId().toString();
} else if (user instanceof PeerDaemonUser) {
userName = PeerDaemonUser.USER_NAME;
}
event.setProperty(P_USER_NAME, userName);
event.setProperty(P_ACCOUNT_ID, accountId);
return event;
}
private static String id(final int id) {
return IdGenerator.format(id);
}
void audit(Context ctx, Object result, String cmd) {
audit(ctx, result, cmd, null);
}
void audit(Context ctx, Object result, DispatchCommand cmd) {
audit(ctx, result, extractWhat(cmd), extractParameters(cmd));
}
private void audit(Context ctx, Object result, String cmd, Multimap<String, ?> params) {
String sessionId;
CurrentUser currentUser;
long created;
if (ctx == null) {
sessionId = null;
currentUser = null;
created = TimeUtil.nowMs();
} else {
SshSession session = ctx.getSession();
sessionId = IdGenerator.format(session.getSessionId());
currentUser = session.getCurrentUser();
created = ctx.created;
}
auditService.dispatch(new SshAuditEvent(sessionId, currentUser,
cmd, created, params, result));
}
private String extractWhat(DispatchCommand dcmd) {
String commandName = dcmd.getCommandName();
String[] args = dcmd.getArguments();
if (args.length > 1) {
return commandName + "." + args[1];
}
return commandName;
}
}
|
Include all command arguments in SSH log entry
Previously the SSH log included only the first command argument. This
prevented the repository name from being logged when
'git receive-pack' was executed instead of 'git-receive-pack'.
Now all command arguments are included in the log entry, ensuring
that the repository name is always logged. This is desirable for
anyone looking to monitor repository access via the SSH log.
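For illustration only, a minimal sketch of an extractWhat() that folds
every argument into the logged command name is shown below. It reuses
the names visible in SshLog.java (DispatchCommand, getCommandName(),
getArguments()); the actual implementation in this change may differ.

  private String extractWhat(DispatchCommand dcmd) {
    StringBuilder commandName = new StringBuilder(dcmd.getCommandName());
    String[] args = dcmd.getArguments();
    // Append every argument after the command itself, e.g. "gerrit.query.--format.JSON"
    for (int i = 1; i < args.length; i++) {
      commandName.append(".").append(args[i]);
    }
    return commandName.toString();
  }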
Change-Id: Idff950e5480a122a2cb366a443d25aa9e0a8f5c8
|
gerrit-sshd/src/main/java/com/google/gerrit/sshd/SshLog.java
|
Include all command arguments in SSH log entry
|
|
Java
|
apache-2.0
|
2c160215a962e7d97bf09e7e51cc8b93b75595c6
| 0
|
tadeegan/eiger,tadeegan/eiger,tadeegan/eiger-application-aware,tadeegan/eiger,tadeegan/eiger-application-aware,tadeegan/eiger-application-aware
|
package deegan;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
import java.nio.ByteBuffer;
import org.apache.cassandra.client.ClientLibrary;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.config.KSMetaData;
import org.apache.cassandra.locator.NetworkTopologyStrategy;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.LamportClock;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.hsqldb.SchemaManager;
import org.apache.cassandra.locator.SimpleStrategy;
public class TestClient {
private final int DEFAULT_THRIFT_PORT = 9160;
private final String MAIN_KEYSPACE = "KeySpace1";
private final String MAIN_COLUMN_FAMILY = "ColumnFam1";
private Map<String, Integer> localServerIPAndPorts = new HashMap<String, Integer>();
private List<Map<String, Integer>> dcToServerIPAndPorts = null;
private ConsistencyLevel consistencyLevel;
public static void main(String[] args) {
TestClient client = new TestClient();
}
/**
* Constructor
*/
public TestClient() {
this.setup();
try{
print("yoyo");
this.trySomePutsAndGets();
}
catch(Exception e){
e.printStackTrace();
}
}
/**
* Prints to System out
* @param str
*/
private void print(String str) {
System.out.println(str);
}
//// Helpers
private static Column newColumn(String name) {
return new Column(ByteBufferUtil.bytes(name));
}
private static Column newColumn(String name, String value) {
return new Column(ByteBufferUtil.bytes(name)).setValue(ByteBufferUtil.bytes(value)).setTimestamp(0L);
}
private static Column newColumn(String name, String value, long timestamp) {
return new Column(ByteBufferUtil.bytes(name)).setValue(ByteBufferUtil.bytes(value)).setTimestamp(timestamp);
}
private static CounterColumn newCounterColumn(String name, long value) {
return new CounterColumn(ByteBufferUtil.bytes(name), value);
}
private void waitForKeyspacePropagation(Map<String, Integer> allServerIPAndPorts, String keyspace) throws TException
{
System.out.println("Waiting for key propagation...");
for (Entry<String, Integer> ipAndPort : allServerIPAndPorts.entrySet()) {
String ip = ipAndPort.getKey();
Integer port = ipAndPort.getValue();
TTransport tFramedTransport = new TFramedTransport(new TSocket(ip, port));
TProtocol binaryProtoOnFramed = new TBinaryProtocol(tFramedTransport);
Cassandra.Client client = new Cassandra.Client(binaryProtoOnFramed);
tFramedTransport.open();
// FIXME: This is a hideous way to ensure the earlier system_add_keyspace has propagated everywhere
while(true) {
try {
client.set_keyspace(keyspace, LamportClock.sendTimestamp());
break;
} catch (InvalidRequestException e) {
try {
Thread.sleep(1000);
} catch (InterruptedException e1) {
//ignore
}
}
}
}
System.out.println("Keys propagated.");
}
/**
* Creates a keyspace if it doesn't already exist
* @param client the cassandra thrift client
* @param keyspace the name of the keyspace to create
* @throws TException
* @throws InvalidRequestException
*/
private void setupKeyspace(Cassandra.Iface client, String keyspace) throws TException, InvalidRequestException
{
List<KsDef> yo = client.describe_keyspaces();
print("Current Keyspaces: -------");
for(KsDef def: yo){
print(def.name);
if(def.name.equals(keyspace)){
print(keyspace + " already exists with strategy: " + def.getStrategy_class() + "... continue");
return;
}
}
print("---------------------------");
List<CfDef> cfDefList = new ArrayList<CfDef>();
CfDef columnFamily = new CfDef(keyspace, MAIN_COLUMN_FAMILY);
cfDefList.add(columnFamily);
try
{
KsDef keySpaceDefenition = new KsDef();
keySpaceDefenition.name = keyspace;
keySpaceDefenition.strategy_class = SimpleStrategy.class.getName();
if (keySpaceDefenition.strategy_options == null)
keySpaceDefenition.strategy_options = new LinkedHashMap<String, String>();
keySpaceDefenition.strategy_options.put("replication_factor", "1");
keySpaceDefenition.cf_defs = cfDefList;
client.system_add_keyspace(keySpaceDefenition);
print("got this far");
int magnitude = client.describe_ring(keyspace).size();
print("magnitude: " + magnitude);
try
{
Thread.sleep(1000 * magnitude);
}
catch (InterruptedException e)
{
throw new RuntimeException(e);
}
}
catch (InvalidRequestException probablyExists)
{
System.out.println("Problem creating keyspace: " + probablyExists.getMessage());
}
catch (Exception e){
print("excpetion here now...");
e.printStackTrace();
}
}
/**
* Modified from cops2 unit tests
*/
private void setup() {
print("setup started");
Integer numDatacenters = 1;
Integer nodesPerDatacenter = 1;
HashMap<String, Integer> localServerIPAndPorts = new HashMap<String, Integer>();
for (int i = 1; i <= nodesPerDatacenter; ++i) {
localServerIPAndPorts.put("127.0.0." + i, DEFAULT_THRIFT_PORT);
}
try{
//Create a keyspace with a replication factor of 1 for each datacenter
TTransport tr = new TFramedTransport(new TSocket("localhost", DEFAULT_THRIFT_PORT));
TProtocol proto = new TBinaryProtocol(tr);
Cassandra.Client client = new Cassandra.Client(proto);
tr.open();
this.setupKeyspace(client, MAIN_KEYSPACE);
this.waitForKeyspacePropagation(localServerIPAndPorts, MAIN_KEYSPACE);
}catch(Exception c){
System.out.println("An exception occured: " + c);
return;
}
this.localServerIPAndPorts = localServerIPAndPorts;
this.consistencyLevel = ConsistencyLevel.ONE;
print("setup done");
}
private void trySomePutsAndGets() throws Exception {
//setup the client library
ClientLibrary lib = new ClientLibrary(this.localServerIPAndPorts, MAIN_KEYSPACE, this.consistencyLevel);
// initialize some sample keys
ByteBuffer key1 = ByteBufferUtil.bytes("tdeegan2");
ByteBuffer key2 = ByteBufferUtil.bytes("ltseng3");
String firstNameColumn = "first_name";
ByteBuffer firstNameColumnBuffer = ByteBufferUtil.bytes(firstNameColumn);
ColumnParent columnParent = new ColumnParent(MAIN_COLUMN_FAMILY);
long timestamp = System.currentTimeMillis();
try{
lib.insert(key1, columnParent, newColumn(firstNameColumn, "thomas", timestamp));
lib.insert(key2, columnParent, newColumn(firstNameColumn, "lewis", timestamp));
}
catch(InvalidRequestException e){
print("invalid request: ");
e.printStackTrace();
}
print("got this far");
ColumnPath cp = new ColumnPath(MAIN_COLUMN_FAMILY);
cp.column = firstNameColumnBuffer;
ColumnOrSuperColumn got1 = lib.get(key1, cp);
ColumnOrSuperColumn got2 = lib.get(key2, cp);
print("-- tdeegan2: " + new String(got1.getColumn().getValue()));
print("-- ltseng3: " + new String(got2.getColumn().getValue()));
print("done with insert");
}
}
|
deegan_client/src/TestClient.java
|
package deegan;
import java.io.IOException;
import java.util.*;
import java.util.Map.Entry;
import java.nio.ByteBuffer;
import org.apache.cassandra.CleanupHelper;
import org.apache.cassandra.client.ClientLibrary;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.config.KSMetaData;
import org.apache.cassandra.cops2.Cops2Test;
import org.apache.cassandra.locator.NetworkTopologyStrategy;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.thrift.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.LamportClock;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.hsqldb.SchemaManager;
import org.apache.cassandra.locator.SimpleStrategy;
public class TestClient {
private final int DEFAULT_THRIFT_PORT = 9160;
private final String MAIN_KEYSPACE = "KeySpace1";
private final String MAIN_COLUMN_FAMILY = "ColumnFam1";
private Map<String, Integer> localServerIPAndPorts = new HashMap<String, Integer>();
private List<Map<String, Integer>> dcToServerIPAndPorts = null;
private ConsistencyLevel consistencyLevel;
public static void main(String[] args) {
TestClient client = new TestClient();
}
/**
* Constructor
*/
public TestClient() {
this.setup();
try{
print("yoyo");
this.trySomePutsAndGets();
}
catch(Exception e){
e.printStackTrace();
}
}
/**
* Prints to System out
* @param str
*/
private void print(String str) {
System.out.println(str);
}
//// Helpers
private static Column newColumn(String name) {
return new Column(ByteBufferUtil.bytes(name));
}
private static Column newColumn(String name, String value) {
return new Column(ByteBufferUtil.bytes(name)).setValue(ByteBufferUtil.bytes(value)).setTimestamp(0L);
}
private static Column newColumn(String name, String value, long timestamp) {
return new Column(ByteBufferUtil.bytes(name)).setValue(ByteBufferUtil.bytes(value)).setTimestamp(timestamp);
}
private static CounterColumn newCounterColumn(String name, long value) {
return new CounterColumn(ByteBufferUtil.bytes(name), value);
}
private void waitForKeyspacePropagation(Map<String, Integer> allServerIPAndPorts, String keyspace) throws TException
{
System.out.println("Waiting for key propagation...");
for (Entry<String, Integer> ipAndPort : allServerIPAndPorts.entrySet()) {
String ip = ipAndPort.getKey();
Integer port = ipAndPort.getValue();
TTransport tFramedTransport = new TFramedTransport(new TSocket(ip, port));
TProtocol binaryProtoOnFramed = new TBinaryProtocol(tFramedTransport);
Cassandra.Client client = new Cassandra.Client(binaryProtoOnFramed);
tFramedTransport.open();
// FIXME: This is a hideous way to ensure the earlier system_add_keyspace has propagated everywhere
while(true) {
try {
client.set_keyspace(keyspace, LamportClock.sendTimestamp());
break;
} catch (InvalidRequestException e) {
try {
Thread.sleep(1000);
} catch (InterruptedException e1) {
//ignore
}
}
}
}
System.out.println("Keys propagated.");
}
/**
* Creates a keyspace if it doesn't already exist
* @param client the cassandra thrift client
* @param keyspace the name of the keyspace to create
* @throws TException
* @throws InvalidRequestException
*/
private void setupKeyspace(Cassandra.Iface client, String keyspace) throws TException, InvalidRequestException
{
List<KsDef> yo = client.describe_keyspaces();
print("Current Keyspaces: -------");
for(KsDef def: yo){
print(def.name);
if(def.name.equals(keyspace)){
print(keyspace + " already exists with strategy: " + def.getStrategy_class() + "... continue");
return;
}
}
print("---------------------------");
List<CfDef> cfDefList = new ArrayList<CfDef>();
CfDef columnFamily = new CfDef(keyspace, MAIN_COLUMN_FAMILY);
cfDefList.add(columnFamily);
try
{
KsDef keySpaceDefenition = new KsDef();
keySpaceDefenition.name = keyspace;
keySpaceDefenition.strategy_class = SimpleStrategy.class.getName();
if (keySpaceDefenition.strategy_options == null)
keySpaceDefenition.strategy_options = new LinkedHashMap<String, String>();
keySpaceDefenition.strategy_options.put("replication_factor", "1");
keySpaceDefenition.cf_defs = cfDefList;
client.system_add_keyspace(keySpaceDefenition);
print("got this far");
int magnitude = client.describe_ring(keyspace).size();
print("magnitude: " + magnitude);
try
{
Thread.sleep(1000 * magnitude);
}
catch (InterruptedException e)
{
throw new RuntimeException(e);
}
}
catch (InvalidRequestException probablyExists)
{
System.out.println("Problem creating keyspace: " + probablyExists.getMessage());
}
catch (Exception e){
print("excpetion here now...");
e.printStackTrace();
}
}
/**
* Modified from cops2 unit tests
*/
private void setup() {
print("setup started");
Integer numDatacenters = 1;
Integer nodesPerDatacenter = 1;
HashMap<String, Integer> localServerIPAndPorts = new HashMap<String, Integer>();
for (int i = 1; i <= nodesPerDatacenter; ++i) {
localServerIPAndPorts.put("127.0.0." + i, DEFAULT_THRIFT_PORT);
}
try{
//Create a keyspace with a replication factor of 1 for each datacenter
TTransport tr = new TFramedTransport(new TSocket("localhost", DEFAULT_THRIFT_PORT));
TProtocol proto = new TBinaryProtocol(tr);
Cassandra.Client client = new Cassandra.Client(proto);
tr.open();
this.setupKeyspace(client, MAIN_KEYSPACE);
this.waitForKeyspacePropagation(localServerIPAndPorts, MAIN_KEYSPACE);
}catch(Exception c){
System.out.println("An exception occured: " + c);
return;
}
this.localServerIPAndPorts = localServerIPAndPorts;
this.consistencyLevel = ConsistencyLevel.ONE;
print("setup done");
}
private void trySomePutsAndGets() throws Exception {
//setup the client library
ClientLibrary lib = new ClientLibrary(this.localServerIPAndPorts, MAIN_KEYSPACE, this.consistencyLevel);
// initialize some sample keys
ByteBuffer key1 = ByteBufferUtil.bytes("tdeegan2");
ByteBuffer key2 = ByteBufferUtil.bytes("ltseng3");
String firstNameColumn = "first_name";
ByteBuffer firstNameColumnBuffer = ByteBufferUtil.bytes(firstNameColumn);
ColumnParent columnParent = new ColumnParent(MAIN_COLUMN_FAMILY);
long timestamp = System.currentTimeMillis();
try{
lib.insert(key1, columnParent, newColumn(firstNameColumn, "thomas", timestamp));
lib.insert(key2, columnParent, newColumn(firstNameColumn, "lewis", timestamp));
}
catch(InvalidRequestException e){
print("invalid request: ");
e.printStackTrace();
}
print("got this far");
ColumnPath cp = new ColumnPath(MAIN_COLUMN_FAMILY);
cp.column = firstNameColumnBuffer;
ColumnOrSuperColumn got1 = lib.get(key1, cp);
ColumnOrSuperColumn got2 = lib.get(key2, cp);
print("-- tdeegan2: " + new String(got1.getColumn().getValue()));
print("-- ltseng3: " + new String(got2.getColumn().getValue()));
print("done with insert");
}
}
|
remove imports
|
deegan_client/src/TestClient.java
|
remove imports
|
|
Java
|
apache-2.0
|
b6d475ca9f9de964c23b7599856752ae3d571f08
| 0
|
vivchar/RendererRecyclerViewAdapter
|
package com.github.vivchar.immutableadapter;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.widget.Toast;
import com.github.vivchar.immutableadapter.items.BaseItemModel;
import com.github.vivchar.immutableadapter.items.category.CategoryModel;
import com.github.vivchar.immutableadapter.items.category.CategoryViewRenderer;
import com.github.vivchar.immutableadapter.items.content.ContentModel;
import com.github.vivchar.immutableadapter.items.content.ContentViewRenderer;
import com.github.vivchar.immutableadapter.items.header.HeaderModel;
import com.github.vivchar.immutableadapter.items.header.HeaderViewRenderer;
import com.github.vivchar.rendererrecyclerviewadapter.RendererRecyclerViewAdapter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;
public
class MainActivity
extends AppCompatActivity
{
public static final int MAX_SPAN_COUNT = 3;
private RendererRecyclerViewAdapter mRecyclerViewAdapter;
private RecyclerView mRecyclerView;
private GridLayoutManager mLayoutManager;
private SwipeRefreshLayout mSwipeToRefresh;
@Override
protected
void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mSwipeToRefresh = (SwipeRefreshLayout) findViewById(R.id.refresh);
mSwipeToRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener()
{
@Override
public
void onRefresh() {
updateItems();
}
});
mRecyclerViewAdapter = new RendererRecyclerViewAdapter();
mRecyclerViewAdapter.registerRenderer(new HeaderViewRenderer(HeaderModel.TYPE, this));
mRecyclerViewAdapter.registerRenderer(new CategoryViewRenderer(CategoryModel.TYPE, this, mListener));
mRecyclerViewAdapter.registerRenderer(new ContentViewRenderer(ContentModel.TYPE, this, mListener));
mLayoutManager = new GridLayoutManager(this, MAX_SPAN_COUNT);
mLayoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup()
{
@Override
public
int getSpanSize(final int position) {
switch (mRecyclerViewAdapter.getItemViewType(position)) {
case ContentModel.TYPE:
return 1;
case CategoryModel.TYPE:
default:
return 3;
}
}
});
mRecyclerView = (RecyclerView) findViewById(R.id.recycler_view);
mRecyclerView.setLayoutManager(mLayoutManager);
mRecyclerView.setAdapter(mRecyclerViewAdapter);
mRecyclerView.addItemDecoration(new EqualSpacesItemDecoration(20));
updateItems();
}
private
void updateItems() {
// mRecyclerViewAdapter.setItems(getItems());
// mRecyclerViewAdapter.notifyDataSetChanged();
mRecyclerViewAdapter.setItems(getItems(), mDiffCallback);
mSwipeToRefresh.setRefreshing(false);
}
@NonNull
private
ArrayList<BaseItemModel> getItems() {
final ArrayList<BaseItemModel> items = new ArrayList<>();
final int headerID = 1;
items.add(new HeaderModel(headerID, "header"));
final int categoryCount = random(3, 9);
for (int i = 0; i < categoryCount; i++) {
final int categoryID = i * 10;
items.add(new CategoryModel(categoryID, "some category #" + (i + 1)));
final ArrayList<BaseItemModel> content = new ArrayList<>();
final int contentCount = random(1, 9);
for (int j = 0; j < contentCount; j++) {
final int contentID = i * 10 + j;
content.add(new ContentModel(contentID, "content " + (j + 1)));
}
Collections.shuffle(content);
items.addAll(content);
}
return items;
}
private
int random(final int min, final int max) {
final Random random = new Random();
return random.nextInt(max - min + 1) + min;
}
@NonNull
private final ContentViewRenderer.Listener mListener = new ContentViewRenderer.Listener()
{
@Override
public
void onCategoryClicked(@NonNull final CategoryModel model) {
Toast.makeText(MainActivity.this, model.getName(), Toast.LENGTH_SHORT).show();
}
@Override
public
void onContentItemClicked(@NonNull final ContentModel model) {
Toast.makeText(MainActivity.this, model.getName(), Toast.LENGTH_SHORT).show();
}
};
@NonNull
private final RendererRecyclerViewAdapter.DiffCallback<BaseItemModel> mDiffCallback = new RendererRecyclerViewAdapter
.DefaultDiffCallback<BaseItemModel>()
{
@Override
public
boolean areItemsTheSame(final BaseItemModel oldItem, final BaseItemModel newItem) {
return oldItem.getID() == newItem.getID();
}
@Override
public
boolean areContentsTheSame(final BaseItemModel oldItem, final BaseItemModel newItem) {
return oldItem.equals(newItem);
}
};
}
|
example/src/main/java/com/github/vivchar/immutableadapter/MainActivity.java
|
package com.github.vivchar.immutableadapter;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.widget.Toast;
import com.github.vivchar.immutableadapter.items.BaseItemModel;
import com.github.vivchar.immutableadapter.items.category.CategoryModel;
import com.github.vivchar.immutableadapter.items.category.CategoryViewRenderer;
import com.github.vivchar.immutableadapter.items.content.ContentModel;
import com.github.vivchar.immutableadapter.items.content.ContentViewRenderer;
import com.github.vivchar.immutableadapter.items.header.HeaderModel;
import com.github.vivchar.immutableadapter.items.header.HeaderViewRenderer;
import com.github.vivchar.rendererrecyclerviewadapter.RendererRecyclerViewAdapter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Random;
public
class MainActivity
extends AppCompatActivity
{
public static final int MAX_SPAN_COUNT = 3;
private RendererRecyclerViewAdapter mRecyclerViewAdapter;
private RecyclerView mRecyclerView;
private GridLayoutManager mLayoutManager;
private SwipeRefreshLayout mSwipeToRefresh;
@Override
protected
void onCreate(final Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
mSwipeToRefresh = (SwipeRefreshLayout) findViewById(R.id.refresh);
mSwipeToRefresh.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener()
{
@Override
public
void onRefresh() {
updateItems();
}
});
mRecyclerViewAdapter = new RendererRecyclerViewAdapter();
mRecyclerViewAdapter.registerRenderer(new HeaderViewRenderer(HeaderModel.TYPE, this));
mRecyclerViewAdapter.registerRenderer(new CategoryViewRenderer(CategoryModel.TYPE, this, mListener));
mRecyclerViewAdapter.registerRenderer(new ContentViewRenderer(ContentModel.TYPE, this, mListener));
mLayoutManager = new GridLayoutManager(this, MAX_SPAN_COUNT);
mLayoutManager.setSpanSizeLookup(new GridLayoutManager.SpanSizeLookup()
{
@Override
public
int getSpanSize(final int position) {
switch (mRecyclerViewAdapter.getItemViewType(position)) {
case ContentModel.TYPE:
return 1;
case CategoryModel.TYPE:
default:
return 3;
}
}
});
mRecyclerView = (RecyclerView) findViewById(R.id.recycler_view);
mRecyclerView.setLayoutManager(mLayoutManager);
mRecyclerView.setAdapter(mRecyclerViewAdapter);
mRecyclerView.addItemDecoration(new EqualSpacesItemDecoration(20));
updateItems();
}
private
void updateItems() {
// mRecyclerViewAdapter.setItems(getItems());
// mRecyclerViewAdapter.notifyDataSetChanged();
mRecyclerViewAdapter.setItems(getItems(), mDiffCallback);
mSwipeToRefresh.setRefreshing(false);
}
@NonNull
private
ArrayList<BaseItemModel> getItems() {
final ArrayList<BaseItemModel> items = new ArrayList<>();
final int headerID = 1;
items.add(new HeaderModel(headerID, "header"));
final int categoryCount = random(3, 9);
for (int i = 0; i < categoryCount; i++) {
final int categoryID = i * 10;
items.add(new CategoryModel(categoryID, "some category #" + (i + 1)));
final ArrayList<BaseItemModel> content = new ArrayList<>();
final int contentCount = random(1, 9);
for (int j = 0; j < contentCount; j++) {
final int contentID = i * 10 + j;
content.add(new ContentModel(contentID, "content " + (j + 1)));
}
Collections.shuffle(content);
items.addAll(content);
}
return items;
}
private
int random(final int min, final int max) {
final Random random = new Random();
return random.nextInt(max - min + 1) + min;
}
@NonNull
private final ContentViewRenderer.Listener mListener = new ContentViewRenderer.Listener()
{
@Override
public
void onCategoryClicked(@NonNull final CategoryModel model) {
Toast.makeText(MainActivity.this, model.getName(), Toast.LENGTH_SHORT).show();
}
@Override
public
void onContentItemClicked(@NonNull final ContentModel model) {
Toast.makeText(MainActivity.this, model.getName(), Toast.LENGTH_SHORT).show();
}
};
@NonNull
private final RendererRecyclerViewAdapter.DiffCallback<BaseItemModel> mDiffCallback = new RendererRecyclerViewAdapter
.DiffCallback<BaseItemModel>()
{
@Override
public
boolean areItemsTheSame(final BaseItemModel oldItem, final BaseItemModel newItem) {
return oldItem.getID() == newItem.getID();
}
@Override
public
boolean areContentsTheSame(final BaseItemModel oldItem, final BaseItemModel newItem) {
return oldItem.equals(newItem);
}
};
}
|
Fix example
|
example/src/main/java/com/github/vivchar/immutableadapter/MainActivity.java
|
Fix example
|
|
Java
|
apache-2.0
|
2b83d21ba78b57de80e81b3e548f8d83064cbb23
| 0
|
pcingola/BigDataScript,pcingola/BigDataScript,pcingola/BigDataScript,pcingola/BigDataScript,pcingola/BigDataScript,pcingola/BigDataScript,pcingola/BigDataScript
|
package org.bds;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.antlr.v4.runtime.ANTLRFileStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.LexerNoViableAltException;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.Tree;
import org.bds.antlr.BigDataScriptLexer;
import org.bds.antlr.BigDataScriptParser;
import org.bds.antlr.BigDataScriptParser.IncludeFileContext;
import org.bds.compile.CompileErrorStrategy;
import org.bds.compile.CompilerErrorListener;
import org.bds.compile.CompilerMessage.MessageType;
import org.bds.compile.CompilerMessages;
import org.bds.compile.TypeCheckedNodes;
import org.bds.data.Data;
import org.bds.executioner.Executioner;
import org.bds.executioner.Executioners;
import org.bds.executioner.Executioners.ExecutionerType;
import org.bds.lang.BdsNode;
import org.bds.lang.BdsNodeFactory;
import org.bds.lang.ExpressionTask;
import org.bds.lang.FunctionCall;
import org.bds.lang.FunctionDeclaration;
import org.bds.lang.ProgramUnit;
import org.bds.lang.Statement;
import org.bds.lang.StatementInclude;
import org.bds.lang.Type;
import org.bds.lang.TypeList;
import org.bds.lang.VarDeclaration;
import org.bds.lang.nativeFunctions.NativeLibraryFunctions;
import org.bds.lang.nativeMethods.string.NativeLibraryString;
import org.bds.run.BdsThread;
import org.bds.run.HelpCreator;
import org.bds.run.RunState;
import org.bds.scope.Scope;
import org.bds.scope.ScopeSymbol;
import org.bds.serialize.BdsSerializer;
import org.bds.task.TaskDependecies;
import org.bds.util.Gpr;
import org.bds.util.Timer;
/**
* BDS command line
*
* @author pcingola
*/
public class Bds {
enum BdsAction {
RUN, RUN_CHECKPOINT, INFO_CHECKPOINT, TEST
}
public static final String SOFTWARE_NAME = Bds.class.getSimpleName();
public static final String BUILD = Gpr.compileTimeStamp(Bds.class);
public static final String REVISION = "m";
public static final String VERSION_MAJOR = "0.99999";
public static final String VERSION_SHORT = VERSION_MAJOR + REVISION;
public static final String VERSION = SOFTWARE_NAME + " " + VERSION_SHORT + " (build " + BUILD + "), by " + Pcingola.BY;
boolean checkPidRegex; // Check PID regex (do not run program)
boolean debug; // debug mode
boolean dryRun; // Dry run (do not run tasks)
boolean extractSource; // Extract source code from checkpoint (only valid in recovery mode)
boolean log; // Log everything (keep STDOUT, SDTERR and ExitCode files)
Boolean noCheckpoint; // Do not create checkpoint files
Boolean noRmOnExit; // Do not remove temp files on exit
boolean quiet; // Quiet mode
boolean stackCheck; // Check stack size when thread finishes running (should be zero)
boolean verbose; // Verbose mode
boolean warnUnusedFunctionsAnyFile; // Warn if there are unused functions in any (included) file
Boolean reportHtml; // Use HTML report style
Boolean reportYaml; // Use YAML report style
int taskFailCount = -1;
String configFile = Config.DEFAULT_CONFIG_FILE; // Configuration file
String chekcpointRestoreFile; // Restore file
String programFileName; // Program file name
String pidFile; // File to store PIDs
String reportFileName;
String system; // System type
String queue; // Queue name
BdsAction bdsAction;
Config config;
ProgramUnit programUnit; // Program (parsed nodes)
BdsThread bdsThread;
ArrayList<String> programArgs; // Command line arguments for BigDataScript program
/**
* Create an AST from a program (using ANTLR lexer & parser)
* Returns null if error
* Use 'alreadyIncluded' to keep track of files already pulled in via 'include' statements
*/
public static ParseTree createAst(File file, boolean debug, Set<String> alreadyIncluded) {
alreadyIncluded.add(Gpr.getCanonicalFileName(file));
String fileName = file.toString();
String filePath = fileName;
BigDataScriptLexer lexer = null;
BigDataScriptParser parser = null;
try {
filePath = file.getCanonicalPath();
// Input stream
if (!Gpr.canRead(filePath)) {
CompilerMessages.get().addError("Can't read file '" + filePath + "'");
return null;
}
// Create a CharStream that reads from the program file
ANTLRFileStream input = new ANTLRFileStream(fileName);
//---
// Lexer: Create a lexer that feeds off of input CharStream
//---
lexer = new BigDataScriptLexer(input) {
@Override
public void recover(LexerNoViableAltException e) {
throw new RuntimeException(e); // Bail out
}
};
//---
// Parser
//---
CommonTokenStream tokens = new CommonTokenStream(lexer);
parser = new BigDataScriptParser(tokens);
// Parser error handling
parser.setErrorHandler(new CompileErrorStrategy()); // Bail out with exception if errors in parser
parser.addErrorListener(new CompilerErrorListener()); // Catch some other error messages that 'CompileErrorStrategy' fails to catch
// Begin parsing at main rule
ParseTree tree = parser.programUnit();
// Error loading file?
if (tree == null) {
System.err.println("Can't parse file '" + filePath + "'");
return null;
}
// Show main nodes
if (debug) {
Timer.showStdErr("AST:");
for (int childNum = 0; childNum < tree.getChildCount(); childNum++) {
Tree child = tree.getChild(childNum);
System.err.println("\t\tChild " + childNum + ":\t" + child + "\tTree:'" + child.toStringTree() + "'");
}
}
// Included files
boolean resolveIncludePending = true;
while (resolveIncludePending)
resolveIncludePending = resolveIncludes(tree, debug, alreadyIncluded);
return tree;
} catch (Exception e) {
String msg = e.getMessage();
CompilerMessages.get().addError("Could not compile " + filePath //
+ (msg != null ? " :" + e.getMessage() : "") //
);
return null;
}
}
/**
* Main
*/
public static void main(String[] args) {
// Create BigDataScript object and run it
Bds bigDataScript = new Bds(args);
int exitValue = bigDataScript.run();
System.exit(exitValue);
}
/**
* Resolve include statements
*/
private static boolean resolveIncludes(ParseTree tree, boolean debug, Set<String> alreadyIncluded) {
boolean changed = false;
if (tree instanceof IncludeFileContext) {
// Parent file: The one that is including the other file
File parentFile = new File(((IncludeFileContext) tree).getStart().getInputStream().getSourceName());
// Included file name
String includedFilename = StatementInclude.includeFileName(tree.getChild(1).getText());
// Find file (look into all include paths)
File includedFile = StatementInclude.includeFile(includedFilename, parentFile);
if (includedFile == null) {
CompilerMessages.get().add(tree, parentFile, "\n\tIncluded file not found: '" + includedFilename + "'\n\tSearch path: " + Config.get().getIncludePath(), MessageType.ERROR);
return false;
}
// Already included? don't bother
String canonicalFileName = Gpr.getCanonicalFileName(includedFile);
if (alreadyIncluded.contains(canonicalFileName)) {
if (debug) Gpr.debug("File already included: '" + includedFilename + "'\tCanonical path: '" + canonicalFileName + "'");
return false;
}
// Can we read the include file?
if (!includedFile.canRead()) {
CompilerMessages.get().add(tree, parentFile, "\n\tCannot read included file: '" + includedFilename + "'", MessageType.ERROR);
return false;
}
// Parse
ParseTree treeinc = createAst(includedFile, debug, alreadyIncluded);
if (treeinc == null) {
CompilerMessages.get().add(tree, parentFile, "\n\tFatal error including file '" + includedFilename + "'", MessageType.ERROR);
return false;
}
// Is a child always a RuleContext?
for (int i = 0; i < treeinc.getChildCount(); i++) {
((IncludeFileContext) tree).addChild((RuleContext) treeinc.getChild(i));
}
} else {
for (int i = 0; i < tree.getChildCount(); i++)
changed |= resolveIncludes(tree.getChild(i), debug, alreadyIncluded);
}
return changed;
}
public Bds(String args[]) {
initDefaults();
parse(args);
initialize();
}
/**
* Check 'pidRegex'
*/
public void checkPidRegex() {
// PID regex matcher
String pidPatternStr = config.getPidRegex("");
if (pidPatternStr.isEmpty()) {
System.err.println("Cannot find 'pidRegex' entry in config file.");
System.exit(1);
}
Executioner executioner = Executioners.getInstance().get(ExecutionerType.CLUSTER);
// Show pattern
System.out.println("Matching pidRegex '" + pidPatternStr + "'");
// Read STDIN and check pattern
try {
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
String line;
while ((line = in.readLine()) != null) {
String pid = executioner.parsePidLine(line);
System.out.println("Input line:\t'" + line + "'\tMatched: '" + pid + "'");
}
} catch (IOException e) {
e.printStackTrace();
}
executioner.kill(); // Kill executioner
}
/**
* Compile program
*/
public boolean compile() {
if (debug) log("Loading file: '" + programFileName + "'");
//---
// Convert to AST
//---
if (debug) log("Creating AST.");
CompilerMessages.reset();
ParseTree tree = null;
try {
tree = createAst();
} catch (Exception e) {
System.err.println("Fatal error cannot continue - " + e.getMessage());
return false;
}
// No tree produced? Fatal error
if (tree == null) {
if (CompilerMessages.get().isEmpty()) {
CompilerMessages.get().addError("Fatal error: Could not compile");
}
return false;
}
// Any error? Do not continue
if (!CompilerMessages.get().isEmpty()) return false;
//---
// Convert to BigDataScriptNodes
//---
if (debug) log("Creating BigDataScript tree.");
CompilerMessages.reset();
programUnit = (ProgramUnit) BdsNodeFactory.get().factory(null, tree); // Transform AST to BigDataScript tree
if (debug) log("AST:\n" + programUnit.toString());
// Any error messages?
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
if (CompilerMessages.get().hasErrors()) return false;
//---
// Type-checking
//---
if (debug) log("Type checking.");
CompilerMessages.reset();
Scope programScope = new Scope();
programUnit.typeChecking(programScope, CompilerMessages.get());
// Any error messages?
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
if (CompilerMessages.get().hasErrors()) return false;
// Free some memory by resetting structures we won't use any more
TypeCheckedNodes.get().reset();
// Perform some checking and show warning messages
compileWarn();
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
// OK
return true;
}
/**
* Perform some checking and show warning messages
*/
void compileWarn() {
compileWarnUnusedFunctions();
}
/**
* Check for unused functions
*/
void compileWarnUnusedFunctions() {
String progUnitFile = programUnit.getFileNameCanonical();
List<BdsNode> fdecls = programUnit.findNodes(FunctionDeclaration.class, true);
// Add all to 'unused' set
Set<FunctionDeclaration> unused = new HashSet<>();
for (BdsNode n : fdecls) {
FunctionDeclaration fdecl = (FunctionDeclaration) n;
if (warnUnusedFunctionsAnyFile //
|| (!warnUnusedFunctionsAnyFile && progUnitFile.equals(fdecl.getFileNameCanonical()))) {
unused.add(fdecl);
}
}
// Remove the ones that are used
List<BdsNode> fcalls = programUnit.findNodes(FunctionCall.class, true);
for (BdsNode n : fcalls) {
FunctionCall fcall = (FunctionCall) n;
FunctionDeclaration fdecl = fcall.getFunctionDeclaration();
unused.remove(fdecl);
}
// Any functions that are 'unused'?
if (unused.isEmpty()) return;
for (FunctionDeclaration fdecl : unused) {
CompilerMessages.get().add(fdecl, "Unused function " + fdecl.getFunctionName() + fdecl.signature(), MessageType.WARNING);
}
}
/**
* Load configuration file
*/
protected void config() {
//---
// Config
//---
config = new Config(configFile);
config.setQuiet(quiet);
config.setVerbose(verbose);
config.setDebug(debug);
config.setLog(log);
config.setDryRun(dryRun);
config.setTaskFailCount(taskFailCount);
config.setReportFileName(reportFileName);
config.setExtractSource(extractSource);
config.setVerbose(verbose);
// Override config file by command line option
if (noRmOnExit != null) config.setNoRmOnExit(noRmOnExit);
if (noCheckpoint != null) config.setNoCheckpoint(noCheckpoint);
if (reportHtml != null) config.setReportHtml(reportHtml);
if (reportYaml != null) config.setReportYaml(reportYaml);
if (pidFile == null) {
if (programFileName != null) pidFile = programFileName + ".pid";
else pidFile = chekcpointRestoreFile + ".pid";
}
config.setPidFile(pidFile);
}
/**
* Create an AST from a program file
* @return A parsed tree
*/
ParseTree createAst() {
File file = new File(programFileName);
return createAst(file, debug, new HashSet<String>());
}
/**
* Download a URL to a local file
* @return true if successful
*/
public boolean download(String url, String fileName) {
Data remote = Data.factory(url);
// Sanity checks
if (!remote.isRemote()) {
System.err.println("Cannot download non-remote URL: " + url);
return false;
}
if (!remote.isFile()) {
System.err.println("Cannot download non-file: " + url);
return false;
}
// Already downloaded? Nothing to do
if (remote.isDownloaded(fileName)) {
if (verbose) System.err.println("Local file is up to date, no download required: " + fileName);
return true;
}
return remote.download(fileName);
}
public BdsThread getBigDataScriptThread() {
return bdsThread;
}
public CompilerMessages getCompilerMessages() {
return CompilerMessages.get();
}
public Config getConfig() {
return config;
}
public ArrayList<String> getProgramArgs() {
return programArgs;
}
public ProgramUnit getProgramUnit() {
return programUnit;
}
/**
* Show information from a checkpoint file
*/
int infoCheckpoint() {
// Load checkpoint file
BdsSerializer bdsSerializer = new BdsSerializer(chekcpointRestoreFile, config);
List<BdsThread> bdsThreads = bdsSerializer.load();
for (BdsThread bdsThread : bdsThreads)
bdsThread.print();
return 0;
}
/**
* Get default settings
*/
void initDefaults() {
reportFileName = null;
reportHtml = null;
reportYaml = null;
dryRun = false;
log = false;
}
/**
* Initialize before running or type-checking
*/
void initialize() {
Type.reset();
// Reset node factory
BdsNodeFactory.reset();
// Startup message
if (verbose || debug) Timer.showStdErr(VERSION);
// Load config file
config();
// Global scope
initilaizeGlobalScope();
// Libraries
initilaizeLibraries();
}
/**
* Add symbols to global scope
*/
void initilaizeGlobalScope() {
if (debug) log("Initialize global scope.");
// Reset Global scope
Scope.resetGlobalScope();
Scope globalScope = Scope.getGlobalScope();
//---
// Get default values from command line or config file
//---
// Command line parameters override defaults
String cpusStr = config.getString(ExpressionTask.TASK_OPTION_CPUS, "1"); // Default number of cpus: 1
long cpus = Gpr.parseIntSafe(cpusStr);
if (cpus <= 0) throw new RuntimeException("Number of cpus must be a positive number ('" + cpusStr + "')");
long mem = Gpr.parseMemSafe(config.getString(ExpressionTask.TASK_OPTION_MEM, "-1")); // Default amount of memory: -1 (unrestricted)
String node = config.getString(ExpressionTask.TASK_OPTION_NODE, "");
if (queue == null) queue = config.getString(ExpressionTask.TASK_OPTION_QUEUE, "");
if (system == null) system = config.getString(ExpressionTask.TASK_OPTION_SYSTEM, ExecutionerType.LOCAL.toString().toLowerCase());
if (taskFailCount < 0) taskFailCount = Gpr.parseIntSafe(config.getString(ExpressionTask.TASK_OPTION_RETRY, "0"));
long oneDay = 1L * 24 * 60 * 60;
long timeout = Gpr.parseLongSafe(config.getString(ExpressionTask.TASK_OPTION_TIMEOUT, "" + oneDay));
long wallTimeout = Gpr.parseLongSafe(config.getString(ExpressionTask.TASK_OPTION_WALL_TIMEOUT, "" + oneDay));
long cpusLocal = Gpr.parseLongSafe(config.getString(Scope.GLOBAL_VAR_LOCAL_CPUS, "" + Gpr.NUM_CORES));
// ---
// Add global symbols
// ---
globalScope.add(new ScopeSymbol(Scope.GLOBAL_VAR_PROGRAM_NAME, Type.STRING, "")); // Empty for now; these are assigned later
globalScope.add(new ScopeSymbol(Scope.GLOBAL_VAR_PROGRAM_PATH, Type.STRING, ""));
// Task related variables: Default values
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_SYSTEM, Type.STRING, system)); // System type: "local", "ssh", "cluster", "aws", etc.
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_CPUS, Type.INT, cpus)); // Default number of cpus
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_MEM, Type.INT, mem)); // Default amount of memory (unrestricted)
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_QUEUE, Type.STRING, queue)); // Default queue: none
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_NODE, Type.STRING, node)); // Default node: none
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_CAN_FAIL, Type.BOOL, false)); // Task fail triggers checkpoint & exit (a task cannot fail)
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_ALLOW_EMPTY, Type.BOOL, false)); // Tasks are allowed to have empty output file/s
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_RETRY, Type.INT, (long) taskFailCount)); // A failed task can be re-tried (re-run) N times before being considered failed.
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_TIMEOUT, Type.INT, timeout)); // Task default timeout
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_WALL_TIMEOUT, Type.INT, wallTimeout)); // Task default wall-timeout
globalScope.add(new ScopeSymbol(Scope.GLOBAL_VAR_LOCAL_CPUS, Type.INT, cpusLocal));
// Number of local CPUs
// Kilo, Mega, Giga, Tera, Peta.
LinkedList<ScopeSymbol> constants = new LinkedList<>();
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_K, Type.INT, 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_M, Type.INT, 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_G, Type.INT, 1024L * 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_T, Type.INT, 1024L * 1024L * 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_P, Type.INT, 1024L * 1024L * 1024L * 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_MINUTE, Type.INT, 60L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_HOUR, Type.INT, (long) (60 * 60)));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_DAY, Type.INT, (long) (24 * 60 * 60)));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_WEEK, Type.INT, (long) (7 * 24 * 60 * 60)));
// Math constants
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_E, Type.REAL, Math.E));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_PI, Type.REAL, Math.PI));
// Add all constants
for (ScopeSymbol ss : constants) {
ss.setConstant(true);
globalScope.add(ss);
}
// Set "physical" path
String path;
try {
path = new File(".").getCanonicalPath();
} catch (IOException e) {
throw new RuntimeException("Cannot get cannonical path for current dir");
}
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_PHYSICAL_PATH, Type.STRING, path));
// Set all environment variables
Map<String, String> envMap = System.getenv();
for (String varName : envMap.keySet()) {
String varVal = envMap.get(varName);
globalScope.add(new ScopeSymbol(varName, Type.STRING, varVal));
}
// Command line arguments (default: empty list)
// This is properly set in the 'initializeArgs()' method, but
// we have to set something now, otherwise we'll get a "variable
// not found" error at compile time if the program attempts
// to use 'args'.
Scope.getGlobalScope().add(new ScopeSymbol(Scope.GLOBAL_VAR_ARGS_LIST, TypeList.get(Type.STRING), new ArrayList<String>()));
}
/**
* Initialize standard libraries
*/
void initilaizeLibraries() {
if (debug) log("Initialize standard libraries.");
// Native functions
NativeLibraryFunctions nativeLibraryFunctions = new NativeLibraryFunctions();
if (debug) log("Native library:\n" + nativeLibraryFunctions);
// Native library: String
NativeLibraryString nativeLibraryString = new NativeLibraryString();
if (debug) log("Native library:\n" + nativeLibraryString);
}
/**
* Is this a command line option (e.g. "-tfam" is a command line option, but "-" means STDIN)
*/
protected boolean isOpt(String arg) {
return arg.startsWith("-") && (arg.length() > 1);
}
void log(String msg) {
Timer.showStdErr(getClass().getSimpleName() + ": " + msg);
}
/**
* Parse command line arguments
*/
public void parse(String[] args) {
// Nothing? Show command line options
if (args.length <= 0) usage(null);
programArgs = new ArrayList<>();
bdsAction = BdsAction.RUN;
for (int i = 0; i < args.length; i++) {
String arg = args[i];
if (programFileName != null) {
// Everything after 'programFileName' is a command line
// argument for the BigDataScript program
programArgs.add(arg);
} else if (isOpt(arg)) {
switch (arg.toLowerCase()) {
case "-checkpidregex":
checkPidRegex = true;
break;
case "-c":
case "-config":
// Config file
if ((i + 1) < args.length) configFile = args[++i];
else usage("Option '-c' without restore file argument");
break;
case "-d":
case "-debug":
debug = verbose = true; // Debug implies verbose
break;
case "-download":
if ((i + 2) < args.length) {
config();
boolean ok = download(args[++i], args[++i]);
System.exit(ok ? 0 : 1);
} else usage("Option '-download' requires two parameters (URL and file)");
break;
case "-dryrun":
dryRun = true;
noRmOnExit = true; // Not running, so don't delete files
reportHtml = reportYaml = false; // Don't create reports
break;
case "-extractsource":
extractSource = true;
break;
case "-h":
case "-help":
case "--help":
usage(null);
break;
case "-i":
case "-info":
// Checkpoint info
if ((i + 1) < args.length) chekcpointRestoreFile = args[++i];
else usage("Option '-i' without checkpoint file argument");
bdsAction = BdsAction.INFO_CHECKPOINT;
break;
case "-l":
case "-log":
log = true;
break;
case "-nochp":
noCheckpoint = true;
break;
case "-noreport":
reportHtml = reportYaml = false;
break;
case "-noreporthtml":
reportHtml = false;
break;
case "-noreportyaml":
reportYaml = false;
break;
case "-normonexit":
noRmOnExit = true;
break;
case "-pid":
// PID file
if ((i + 1) < args.length) pidFile = args[++i];
else usage("Option '-pid' without file argument");
break;
case "-q":
case "-queue":
// Queue name
if ((i + 1) < args.length) queue = args[++i];
else usage("Option '-queue' without file argument");
break;
case "-quiet":
verbose = false;
debug = false;
quiet = true;
break;
case "-r":
case "-restore":
// Checkpoint restore
if ((i + 1) < args.length) chekcpointRestoreFile = args[++i];
else usage("Option '-r' without checkpoint file argument");
bdsAction = BdsAction.RUN_CHECKPOINT;
break;
case "-reporthtml":
reportHtml = true;
break;
case "-reportname":
if ((i + 1) < args.length) reportFileName = args[++i];
else usage("Option '-reportName' without name argument");
break;
case "-reportyaml":
case "-yaml":
reportYaml = true;
break;
case "-s":
case "-system":
// System type
if ((i + 1) < args.length) system = args[++i];
else usage("Option '-system' without file argument");
break;
case "-t":
case "-test":
bdsAction = BdsAction.TEST;
break;
case "-upload":
if ((i + 2) < args.length) {
config();
boolean ok = upload(args[++i], args[++i]);
System.exit(ok ? 0 : 1);
} else usage("Option '-upload' requires two parameters (file and URL)");
break;
case "-v":
case "-verbose":
verbose = true;
break;
case "-version":
System.out.println(VERSION);
System.exit(0);
break;
case "-wall":
warnUnusedFunctionsAnyFile = true;
break;
case "-y":
case "-retry":
// Number of retries
if ((i + 1) < args.length) taskFailCount = Gpr.parseIntSafe(args[++i]);
else usage("Option '-t' without number argument");
break;
default:
usage("Unknown command line option " + arg);
}
} else if (programFileName == null) programFileName = arg; // Get program file name
}
// Sanity checks
if (checkPidRegex) {
// OK: Nothing to check
} else if ((programFileName == null) && (chekcpointRestoreFile == null)) {
// No file name => Error
usage("Missing program file name.");
}
}
/**
* Run script
*/
public int run() {
// Initialize
Executioners executioners = Executioners.getInstance(config);
TaskDependecies.reset();
// Check PID regex
if (checkPidRegex) {
checkPidRegex();
return 0;
}
//---
// Run
//---
int exitValue = 0;
switch (bdsAction) {
case RUN_CHECKPOINT:
exitValue = runCheckpoint();
break;
case INFO_CHECKPOINT:
exitValue = infoCheckpoint();
break;
case TEST:
exitValue = runTests();
break;
default:
exitValue = runCompile(); // Compile & run
}
if (verbose) Timer.showStdErr("Finished. Exit code: " + exitValue);
//---
// Kill all executioners
//---
for (Executioner executioner : executioners.getAll())
executioner.kill();
config.kill(); // Kill 'tail' and 'monitor' threads
return exitValue;
}
/**
* Restore from checkpoint and run
*/
int runCheckpoint() {
// Load checkpoint file
BdsSerializer bdsSerializer = new BdsSerializer(chekcpointRestoreFile, config);
List<BdsThread> bdsThreads = bdsSerializer.load();
// Set main thread's programUnit running scope (mostly for debugging and test cases)
// ProgramUnit's scope is the one before 'global'
BdsThread mainThread = bdsThreads.get(0);
programUnit = mainThread.getProgramUnit();
// Set state and recover tasks
for (BdsThread bdsThread : bdsThreads) {
if (bdsThread.isFinished()) {
// Thread finished before serialization: Nothing to do
} else {
bdsThread.setRunState(RunState.CHECKPOINT_RECOVER); // Set run state to recovery
bdsThread.restoreUnserializedTasks(); // Re-execute or add tasks
}
}
// All set, run main thread
return runThread(mainThread);
}
/**
* Compile and run
*/
int runCompile() {
// Compile, abort on errors
if (verbose) Timer.showStdErr("Parsing");
if (!compile()) {
// Show errors and warnings, if any
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
return 1;
}
if (verbose) Timer.showStdErr("Initializing");
BdsParseArgs bdsParseArgs = new BdsParseArgs(this);
bdsParseArgs.setDebug(debug);
bdsParseArgs.parse();
// Run the program
BdsThread bdsThread = new BdsThread(programUnit, config);
if (verbose) Timer.showStdErr("Process ID: " + bdsThread.getBdsThreadId());
// Show script's automatic help message
if (bdsParseArgs.isShowHelp()) {
if (verbose) Timer.showStdErr("Showing automaic 'help'");
HelpCreator hc = new HelpCreator(programUnit);
System.out.println(hc);
return 0;
}
if (verbose) Timer.showStdErr("Running");
int exitCode = runThread(bdsThread);
// Check stack
if (stackCheck) bdsThread.sanityCheckStack();
return exitCode;
}
/**
* Compile and run
*/
int runTests() {
// Compile, abort on errors
if (verbose) Timer.showStdErr("Parsing");
if (!compile()) {
// Show errors and warnings, if any
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
return 1;
}
if (verbose) Timer.showStdErr("Initializing");
BdsParseArgs bdsParseArgs = new BdsParseArgs(this);
bdsParseArgs.setDebug(debug);
bdsParseArgs.parse();
// Run the program
BdsThread bdsThread = new BdsThread(programUnit, config);
if (verbose) Timer.showStdErr("Process ID: " + bdsThread.getBdsThreadId());
if (verbose) Timer.showStdErr("Running tests");
ProgramUnit pu = bdsThread.getProgramUnit();
return runTests(pu);
}
/**
* For each "test*()" function in ProgramUnit, create a thread
* that executes the function's body
*/
int runTests(ProgramUnit progUnit) {
// We need to execute all variable declarations in order to be able to use global variables in 'test*()' functions
List<VarDeclaration> varDecls = programUnit.varDeclarations(false);
List<FunctionDeclaration> testFuncs = progUnit.testsFunctions();
int exitCode = 0;
int testOk = 0, testError = 0;
for (FunctionDeclaration testFunc : testFuncs) {
System.out.println("");
// Run each function
int exitValTest = runTests(progUnit, testFunc, varDecls);
// Show test result
if (exitValTest == 0) {
Timer.show("Test '" + testFunc.getFunctionName() + "': OK");
testOk++;
} else {
Timer.show("Test '" + testFunc.getFunctionName() + "': FAIL");
exitCode = 1;
testError++;
}
}
// Show results
System.out.println("");
Timer.show("Totals"//
+ "\n OK : " + testOk //
+ "\n ERROR : " + testError //
);
return exitCode;
}
/**
* Run a single test function, return exit code
*/
int runTests(ProgramUnit progUnit, FunctionDeclaration testFunc, List<VarDeclaration> varDecls) {
List<Statement> statements = new ArrayList<>();
// Add all variable declarations
for (VarDeclaration varDecl : varDecls)
statements.add(varDecl);
// Note: We execute the function's body (not the function declaration)
statements.add(testFunc.getStatement());
// Create a program unit having all variable declarations and the test function's statements
ProgramUnit puTest = new ProgramUnit(progUnit, null);
puTest.setStatements(statements.toArray(new Statement[0]));
BdsThread bdsTestThread = new BdsThread(puTest, config);
int exitValTest = runThread(bdsTestThread);
return exitValTest;
}
/**
* Run a thread
*/
int runThread(BdsThread bdsThread) {
this.bdsThread = bdsThread;
if (bdsThread.isFinished()) return 0;
bdsThread.start();
try {
bdsThread.join();
} catch (InterruptedException e) {
// Nothing to do?
// May be checkpoint?
return 1;
}
// Check stack
if (stackCheck) bdsThread.sanityCheckStack();
// OK, we are done
return bdsThread.getExitValue();
}
public void setStackCheck(boolean stackCheck) {
this.stackCheck = stackCheck;
}
/**
* Upload a local file to a URL
* @return true if successful
*/
public boolean upload(String fileName, String url) {
Data remote = Data.factory(url);
Data local = Data.factory(fileName);
// Sanity checks
if (!remote.isRemote()) {
System.err.println("Cannot upload to non-remote URL: " + url);
return false;
}
if (!local.isFile()) {
System.err.println("Cannot upload non-file: " + fileName);
return false;
}
if (!local.exists()) {
System.err.println("Local file does not exists: " + fileName);
return false;
}
if (!local.canRead()) {
System.err.println("Cannot read local file : " + fileName);
return false;
}
// Already uploaded? Nothing to do
if (remote.isUploaded(fileName)) {
if (verbose) System.err.println("Remote file is up to date, no upload required: " + url);
return true;
}
return remote.upload(fileName);
}
void usage(String err) {
if (err != null) System.err.println("Error: " + err);
System.out.println(VERSION + "\n");
System.err.println("Usage: " + Bds.class.getSimpleName() + " [options] file.bds");
System.err.println("\nAvailable options: ");
System.err.println(" [-c | -config ] bds.config : Config file. Default : " + configFile);
System.err.println(" [-checkPidRegex] : Check configuration's 'pidRegex' by matching stdin.");
System.err.println(" [-d | -debug ] : Debug mode.");
System.err.println(" -download url file : Download 'url' to local 'file'. Note: Used by 'taks'");
// System.err.println(" -done : Use 'done' files: Default: " + useDoneFile);
System.err.println(" -dryRun : Do not run any task, just show what would be run. Default: " + dryRun);
System.err.println(" [-extractSource] : Extract source code files from checkpoint (only valid combined with '-info').");
System.err.println(" [-i | -info ] checkpoint.chp : Show state information in checkpoint file.");
System.err.println(" [-l | -log ] : Log all tasks (do not delete tmp files). Default: " + log);
System.err.println(" -noChp : Do not create any checkpoint files.");
System.err.println(" -noRmOnExit : Do not remove files marked for deletion on exit (rmOnExit). Default: " + noRmOnExit);
System.err.println(" [-q | -queue ] queueName : Set default queue name.");
System.err.println(" -quiet : Do not show any messages or tasks outputs on STDOUT. Default: " + quiet);
System.err.println(" -reportHtml : Create HTML report. Default: " + reportHtml);
System.err.println(" -reportName <name> : Set base-name for report files.");
System.err.println(" -reportYaml : Create YAML report. Default: " + reportYaml);
System.err.println(" [-r | -restore] checkpoint.chp : Restore state from checkpoint file.");
System.err.println(" [-s | -system ] type : Set system type.");
System.err.println(" [-t | -test ] : Run user test cases (runs all test* functions).");
System.err.println(" -upload file url : Upload local file to 'url'. Note: Used by 'taks'");
System.err.println(" [-v | -verbose] : Be verbose.");
System.err.println(" -version : Show version and exit.");
System.err.println(" -wall : Show all compile time warnings.");
System.err.println(" [-y | -retry ] num : Number of times to retry a failing tasks.");
System.err.println(" -pid <file> : Write local processes PIDs to 'file'");
if (err != null) System.exit(1);
System.exit(0);
}
}
|
src/org/bds/Bds.java
|
package org.bds;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.antlr.v4.runtime.ANTLRFileStream;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.LexerNoViableAltException;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.Tree;
import org.bds.antlr.BigDataScriptLexer;
import org.bds.antlr.BigDataScriptParser;
import org.bds.antlr.BigDataScriptParser.IncludeFileContext;
import org.bds.compile.CompileErrorStrategy;
import org.bds.compile.CompilerErrorListener;
import org.bds.compile.CompilerMessage.MessageType;
import org.bds.compile.CompilerMessages;
import org.bds.compile.TypeCheckedNodes;
import org.bds.data.Data;
import org.bds.executioner.Executioner;
import org.bds.executioner.Executioners;
import org.bds.executioner.Executioners.ExecutionerType;
import org.bds.lang.BdsNode;
import org.bds.lang.BdsNodeFactory;
import org.bds.lang.ExpressionTask;
import org.bds.lang.FunctionCall;
import org.bds.lang.FunctionDeclaration;
import org.bds.lang.ProgramUnit;
import org.bds.lang.Statement;
import org.bds.lang.StatementInclude;
import org.bds.lang.Type;
import org.bds.lang.TypeList;
import org.bds.lang.VarDeclaration;
import org.bds.lang.nativeFunctions.NativeLibraryFunctions;
import org.bds.lang.nativeMethods.string.NativeLibraryString;
import org.bds.run.BdsThread;
import org.bds.run.HelpCreator;
import org.bds.run.RunState;
import org.bds.scope.Scope;
import org.bds.scope.ScopeSymbol;
import org.bds.serialize.BdsSerializer;
import org.bds.task.TaskDependecies;
import org.bds.util.Gpr;
import org.bds.util.Timer;
/**
* BDS command line
*
* @author pcingola
*/
public class Bds {
enum BdsAction {
RUN, RUN_CHECKPOINT, INFO_CHECKPOINT, TEST
}
public static final String SOFTWARE_NAME = Bds.class.getSimpleName();
public static final String BUILD = Gpr.compileTimeStamp(Bds.class);
public static final String REVISION = "m";
public static final String VERSION_MAJOR = "0.99999";
public static final String VERSION_SHORT = VERSION_MAJOR + REVISION;
public static final String VERSION = SOFTWARE_NAME + " " + VERSION_SHORT + " (build " + BUILD + "), by " + Pcingola.BY;
boolean checkPidRegex; // Check PID regex (do not run program)
boolean debug; // debug mode
boolean dryRun; // Dry run (do not run tasks)
boolean extractSource; // Extract source code from checkpoint (only valid in recovery mode)
boolean log; // Log everything (keep STDOUT, SDTERR and ExitCode files)
Boolean noCheckpoint; // Do not create checkpoint files
Boolean noRmOnExit; // Do not remove temp files on exit
boolean quiet; // Quiet mode
boolean stackCheck; // Check stack size when thread finishes running (should be zero)
boolean verbose; // Verbose mode
boolean warnUnusedFunctionsAnyFile; // Warn if there are unused functions in any (included) file
Boolean reportHtml; // Use HTML report style
Boolean reportYaml; // Use YAML report style
int taskFailCount = -1;
String configFile = Config.DEFAULT_CONFIG_FILE; // Configuration file
String chekcpointRestoreFile; // Restore file
String programFileName; // Program file name
String pidFile; // File to store PIDs
String reportFileName;
String system; // System type
String queue; // Queue name
BdsAction bdsAction;
Config config;
ProgramUnit programUnit; // Program (parsed nodes)
BdsThread bdsThread;
ArrayList<String> programArgs; // Command line arguments for BigDataScript program
/**
* Create an AST from a program (using ANTLR lexer & parser)
* Returns null on error
* Use 'alreadyIncluded' to keep track of files already included via 'include' statements
*/
public static ParseTree createAst(File file, boolean debug, Set<String> alreadyIncluded) {
alreadyIncluded.add(Gpr.getCanonicalFileName(file));
String fileName = file.toString();
String filePath = fileName;
BigDataScriptLexer lexer = null;
BigDataScriptParser parser = null;
try {
filePath = file.getCanonicalPath();
// Input stream
if (!Gpr.canRead(filePath)) {
CompilerMessages.get().addError("Can't read file '" + filePath + "'");
return null;
}
// Create a CharStream that reads from standard input
ANTLRFileStream input = new ANTLRFileStream(fileName);
//---
// Lexer: Create a lexer that feeds off of input CharStream
//---
lexer = new BigDataScriptLexer(input) {
@Override
public void recover(LexerNoViableAltException e) {
throw new RuntimeException(e); // Bail out
}
};
//---
// Parser
//---
CommonTokenStream tokens = new CommonTokenStream(lexer);
parser = new BigDataScriptParser(tokens);
// Parser error handling
parser.setErrorHandler(new CompileErrorStrategy()); // Bail out with exception if errors in parser
parser.addErrorListener(new CompilerErrorListener()); // Catch some other error messages that 'CompileErrorStrategy' fails to catch
// Begin parsing at main rule
ParseTree tree = parser.programUnit();
// Error loading file?
if (tree == null) {
System.err.println("Can't parse file '" + filePath + "'");
return null;
}
// Show main nodes
if (debug) {
Timer.showStdErr("AST:");
for (int childNum = 0; childNum < tree.getChildCount(); childNum++) {
Tree child = tree.getChild(childNum);
System.err.println("\t\tChild " + childNum + ":\t" + child + "\tTree:'" + child.toStringTree() + "'");
}
}
// Included files
boolean resolveIncludePending = true;
while (resolveIncludePending)
resolveIncludePending = resolveIncludes(tree, debug, alreadyIncluded);
return tree;
} catch (Exception e) {
String msg = e.getMessage();
CompilerMessages.get().addError("Could not compile " + filePath //
+ (msg != null ? ": " + e.getMessage() : "") //
);
return null;
}
}
/**
* Main
*/
public static void main(String[] args) {
// Create BigDataScript object and run it
Bds bigDataScript = new Bds(args);
int exitValue = bigDataScript.run();
System.exit(exitValue);
}
/**
* Resolve include statements
*/
private static boolean resolveIncludes(ParseTree tree, boolean debug, Set<String> alreadyIncluded) {
boolean changed = false;
if (tree instanceof IncludeFileContext) {
// Parent file: The one that is including the other file
File parentFile = new File(((IncludeFileContext) tree).getStart().getInputStream().getSourceName());
// Included file name
String includedFilename = StatementInclude.includeFileName(tree.getChild(1).getText());
// Find file (look into all include paths)
File includedFile = StatementInclude.includeFile(includedFilename, parentFile);
if (includedFile == null) {
CompilerMessages.get().add(tree, parentFile, "\n\tIncluded file not found: '" + includedFilename + "'\n\tSearch path: " + Config.get().getIncludePath(), MessageType.ERROR);
return false;
}
// Already included? don't bother
String canonicalFileName = Gpr.getCanonicalFileName(includedFile);
if (alreadyIncluded.contains(canonicalFileName)) {
if (debug) Gpr.debug("File already included: '" + includedFilename + "'\tCanonical path: '" + canonicalFileName + "'");
return false;
}
// Can we read the include file?
if (!includedFile.canRead()) {
CompilerMessages.get().add(tree, parentFile, "\n\tCannot read included file: '" + includedFilename + "'", MessageType.ERROR);
return false;
}
// Parse
ParseTree treeinc = createAst(includedFile, debug, alreadyIncluded);
if (treeinc == null) {
CompilerMessages.get().add(tree, parentFile, "\n\tFatal error including file '" + includedFilename + "'", MessageType.ERROR);
return false;
}
// Is a child always a RuleContext?
for (int i = 0; i < treeinc.getChildCount(); i++) {
((IncludeFileContext) tree).addChild((RuleContext) treeinc.getChild(i));
}
} else {
for (int i = 0; i < tree.getChildCount(); i++)
changed |= resolveIncludes(tree.getChild(i), debug, alreadyIncluded);
}
return changed;
}
public Bds(String args[]) {
initDefaults();
parse(args);
initialize();
}
/**
* Check 'pidRegex'
*/
public void checkPidRegex() {
// PID regex matcher
String pidPatternStr = config.getPidRegex("");
if (pidPatternStr.isEmpty()) {
System.err.println("Cannot find 'pidRegex' entry in config file.");
System.exit(1);
}
Executioner executioner = Executioners.getInstance().get(ExecutionerType.CLUSTER);
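// Use the cluster executioner's PID parser to check each line read from STDIN against the configured regex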
// Show pattern
System.out.println("Matching pidRegex '" + pidPatternStr + "'");
// Read STDIN and check pattern
try {
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
String line;
while ((line = in.readLine()) != null) {
String pid = executioner.parsePidLine(line);
System.out.println("Input line:\t'" + line + "'\tMatched: '" + pid + "'");
}
} catch (IOException e) {
e.printStackTrace();
}
executioner.kill(); // Kill executioner
}
/**
* Compile program
*/
public boolean compile() {
if (debug) log("Loading file: '" + programFileName + "'");
//---
// Convert to AST
//---
if (debug) log("Creating AST.");
CompilerMessages.reset();
ParseTree tree = null;
try {
tree = createAst();
} catch (Exception e) {
System.err.println("Fatal error cannot continue - " + e.getMessage());
return false;
}
// No tree produced? Fatal error
if (tree == null) {
if (CompilerMessages.get().isEmpty()) {
CompilerMessages.get().addError("Fatal error: Could not compile");
}
return false;
}
// Any error? Do not continue
if (!CompilerMessages.get().isEmpty()) return false;
//---
// Convert to BigDataScriptNodes
//---
if (debug) log("Creating BigDataScript tree.");
CompilerMessages.reset();
programUnit = (ProgramUnit) BdsNodeFactory.get().factory(null, tree); // Transform AST to BigDataScript tree
if (debug) log("AST:\n" + programUnit.toString());
// Any error messages?
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
if (CompilerMessages.get().hasErrors()) return false;
//---
// Type-checking
//---
if (debug) log("Type checking.");
CompilerMessages.reset();
Scope programScope = new Scope();
programUnit.typeChecking(programScope, CompilerMessages.get());
// Any error messages?
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
if (CompilerMessages.get().hasErrors()) return false;
// Free some memory by resetting structures we won't use any more
TypeCheckedNodes.get().reset();
// Perform some checking and show warning messages
compileWarn();
if (!CompilerMessages.get().hasErrors()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
// OK
return true;
}
/**
* Perform some checking and show warning messages
*/
void compileWarn() {
compileWarnUnusedFunctions();
}
/**
* Check for unused functions
*/
void compileWarnUnusedFunctions() {
String progUnitFile = programUnit.getFileNameCanonical();
List<BdsNode> fdecls = programUnit.findNodes(FunctionDeclaration.class, true);
// Add all to 'unused' set
Set<FunctionDeclaration> unused = new HashSet<>();
for (BdsNode n : fdecls) {
FunctionDeclaration fdecl = (FunctionDeclaration) n;
if (warnUnusedFunctionsAnyFile //
|| (!warnUnusedFunctionsAnyFile && progUnitFile.equals(fdecl.getFileNameCanonical()))) {
unused.add(fdecl);
}
}
// Remove the ones that are used
List<BdsNode> fcalls = programUnit.findNodes(FunctionCall.class, true);
for (BdsNode n : fcalls) {
FunctionCall fcall = (FunctionCall) n;
FunctionDeclaration fdecl = fcall.getFunctionDeclaration();
unused.remove(fdecl);
}
// Any functions that are 'unused'?
if (unused.isEmpty()) return;
for (FunctionDeclaration fdecl : unused) {
CompilerMessages.get().add(fdecl, "Unused function " + fdecl.getFunctionName() + fdecl.signature(), MessageType.WARNING);
}
}
/**
* Load configuration file
*/
protected void config() {
//---
// Config
//---
config = new Config(configFile);
config.setQuiet(quiet);
config.setVerbose(verbose);
config.setDebug(debug);
config.setLog(log);
config.setDryRun(dryRun);
config.setTaskFailCount(taskFailCount);
config.setReportFileName(reportFileName);
config.setExtractSource(extractSource);
config.setVerbose(verbose);
// Override config file by command line option
if (noRmOnExit != null) config.setNoRmOnExit(noRmOnExit);
if (noCheckpoint != null) config.setNoCheckpoint(noCheckpoint);
if (reportHtml != null) config.setReportHtml(reportHtml);
if (reportYaml != null) config.setReportYaml(reportYaml);
if (pidFile == null) {
if (programFileName != null) pidFile = programFileName + ".pid";
else pidFile = chekcpointRestoreFile + ".pid";
}
config.setPidFile(pidFile);
}
/**
* Create an AST from a program file
* @return A parsed tree
*/
ParseTree createAst() {
File file = new File(programFileName);
return createAst(file, debug, new HashSet<String>());
}
/**
* Download a URL to a local file
* @return true if successful
*/
public boolean download(String url, String fileName) {
Data remote = Data.factory(url);
// Sanity checks
if (!remote.isRemote()) {
System.err.println("Cannot download non-remote URL: " + url);
return false;
}
if (!remote.isFile()) {
System.err.println("Cannot download non-file: " + url);
return false;
}
// Already downloaded? Nothing to do
if (remote.isDownloaded(fileName)) {
if (verbose) System.err.println("Local file is up to date, no download required: " + fileName);
return true;
}
return remote.download(fileName);
}
public BdsThread getBigDataScriptThread() {
return bdsThread;
}
public CompilerMessages getCompilerMessages() {
return CompilerMessages.get();
}
public Config getConfig() {
return config;
}
public ArrayList<String> getProgramArgs() {
return programArgs;
}
public ProgramUnit getProgramUnit() {
return programUnit;
}
/**
* Show information from a checkpoint file
*/
int infoCheckpoint() {
// Load checkpoint file
BdsSerializer bdsSerializer = new BdsSerializer(chekcpointRestoreFile, config);
List<BdsThread> bdsThreads = bdsSerializer.load();
for (BdsThread bdsThread : bdsThreads)
bdsThread.print();
return 0;
}
/**
* Get default settings
*/
void initDefaults() {
reportFileName = null;
reportHtml = null;
reportYaml = null;
dryRun = false;
log = false;
}
/**
* Initialize before running or type-checking
*/
void initialize() {
Type.reset();
// Reset node factory
BdsNodeFactory.reset();
// Startup message
if (verbose || debug) Timer.showStdErr(VERSION);
// Load config file
config();
// Global scope
initilaizeGlobalScope();
// Libraries
initilaizeLibraries();
}
/**
* Add symbols to global scope
*/
void initilaizeGlobalScope() {
if (debug) log("Initialize global scope.");
// Reset Global scope
Scope.resetGlobalScope();
Scope globalScope = Scope.getGlobalScope();
//---
// Get default values from command line or config file
//---
// Command line parameters override defaults
String cpusStr = config.getString(ExpressionTask.TASK_OPTION_CPUS, "1"); // Default number of cpus: 1
long cpus = Gpr.parseIntSafe(cpusStr);
if (cpus <= 0) throw new RuntimeException("Number of cpus must be a positive number ('" + cpusStr + "')");
long mem = Gpr.parseMemSafe(config.getString(ExpressionTask.TASK_OPTION_MEM, "-1")); // Default amount of memory: -1 (unrestricted)
String node = config.getString(ExpressionTask.TASK_OPTION_NODE, "");
if (queue == null) queue = config.getString(ExpressionTask.TASK_OPTION_QUEUE, "");
if (system == null) system = config.getString(ExpressionTask.TASK_OPTION_SYSTEM, ExecutionerType.LOCAL.toString().toLowerCase());
if (taskFailCount < 0) taskFailCount = Gpr.parseIntSafe(config.getString(ExpressionTask.TASK_OPTION_RETRY, "0"));
long oneDay = 1L * 24 * 60 * 60;
long timeout = Gpr.parseLongSafe(config.getString(ExpressionTask.TASK_OPTION_TIMEOUT, "" + oneDay));
long wallTimeout = Gpr.parseLongSafe(config.getString(ExpressionTask.TASK_OPTION_WALL_TIMEOUT, "" + oneDay));
long cpusLocal = Gpr.parseLongSafe(config.getString(Scope.GLOBAL_VAR_LOCAL_CPUS, "" + Gpr.NUM_CORES));
// ---
// Add global symbols
// ---
globalScope.add(new ScopeSymbol(Scope.GLOBAL_VAR_PROGRAM_NAME, Type.STRING, "")); // Empty for now; these are assigned later
globalScope.add(new ScopeSymbol(Scope.GLOBAL_VAR_PROGRAM_PATH, Type.STRING, ""));
// Task related variables: Default values
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_SYSTEM, Type.STRING, system)); // System type: "local", "ssh", "cluster", "aws", etc.
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_CPUS, Type.INT, cpus)); // Default number of cpus
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_MEM, Type.INT, mem)); // Default amount of memory (unrestricted)
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_QUEUE, Type.STRING, queue)); // Default queue: none
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_NODE, Type.STRING, node)); // Default node: none
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_CAN_FAIL, Type.BOOL, false)); // Task fail triggers checkpoint & exit (a task cannot fail)
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_ALLOW_EMPTY, Type.BOOL, false)); // Tasks are allowed to have empty output file/s
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_RETRY, Type.INT, (long) taskFailCount)); // Task fail can be re-tried (re-run) N times before considering failed.
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_TIMEOUT, Type.INT, timeout)); // Task default timeout
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_WALL_TIMEOUT, Type.INT, wallTimeout)); // Task default wall-timeout
globalScope.add(new ScopeSymbol(Scope.GLOBAL_VAR_LOCAL_CPUS, Type.INT, cpusLocal));
// Number of local CPUs
// Kilo, Mega, Giga, Tera, Peta.
LinkedList<ScopeSymbol> constants = new LinkedList<>();
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_K, Type.INT, 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_M, Type.INT, 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_G, Type.INT, 1024L * 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_T, Type.INT, 1024L * 1024L * 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_P, Type.INT, 1024L * 1024L * 1024L * 1024L * 1024L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_MINUTE, Type.INT, 60L));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_HOUR, Type.INT, (long) (60 * 60)));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_DAY, Type.INT, (long) (24 * 60 * 60)));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_WEEK, Type.INT, (long) (7 * 24 * 60 * 60)));
// Math constants
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_E, Type.REAL, Math.E));
constants.add(new ScopeSymbol(Scope.GLOBAL_VAR_PI, Type.REAL, Math.PI));
// Add all constants
for (ScopeSymbol ss : constants) {
ss.setConstant(true);
globalScope.add(ss);
}
// Set "physical" path
String path;
try {
path = new File(".").getCanonicalPath();
} catch (IOException e) {
throw new RuntimeException("Cannot get cannonical path for current dir");
}
globalScope.add(new ScopeSymbol(ExpressionTask.TASK_OPTION_PHYSICAL_PATH, Type.STRING, path));
// Set all environment variables
Map<String, String> envMap = System.getenv();
for (String varName : envMap.keySet()) {
String varVal = envMap.get(varName);
globalScope.add(new ScopeSymbol(varName, Type.STRING, varVal));
}
// Command line arguments (default: empty list)
// This is properly set in 'initializeArgs()' method, but
// we have to set something now, otherwise we'll get a "variable
// not found" error at compiler time, if the program attempts
// to use 'args'.
Scope.getGlobalScope().add(new ScopeSymbol(Scope.GLOBAL_VAR_ARGS_LIST, TypeList.get(Type.STRING), new ArrayList<String>()));
}
/**
* Initialize standard libraries
*/
void initilaizeLibraries() {
if (debug) log("Initialize standard libraries.");
// Native functions
NativeLibraryFunctions nativeLibraryFunctions = new NativeLibraryFunctions();
if (debug) log("Native library:\n" + nativeLibraryFunctions);
// Native library: String
NativeLibraryString nativeLibraryString = new NativeLibraryString();
if (debug) log("Native library:\n" + nativeLibraryString);
}
/**
* Is this a command line option (e.g. "-tfam" is a command line option, but "-" means STDIN)
*/
protected boolean isOpt(String arg) {
return arg.startsWith("-") && (arg.length() > 1);
}
void log(String msg) {
Timer.showStdErr(getClass().getSimpleName() + ": " + msg);
}
/**
* Parse command line arguments
*/
public void parse(String[] args) {
// Nothing? Show command line options
if (args.length <= 0) usage(null);
programArgs = new ArrayList<>();
bdsAction = BdsAction.RUN;
for (int i = 0; i < args.length; i++) {
String arg = args[i];
if (programFileName != null) {
// Everything after 'programFileName' is a command line
// argument for the BigDataScript program
programArgs.add(arg);
} else if (isOpt(arg)) {
switch (arg.toLowerCase()) {
case "-checkpidregex":
checkPidRegex = true;
break;
case "-c":
case "-config":
// Config file
if ((i + 1) < args.length) configFile = args[++i];
else usage("Option '-c' without config file argument");
break;
case "-d":
case "-debug":
debug = verbose = true; // Debug implies verbose
break;
case "-download":
if ((i + 2) < args.length) {
config();
boolean ok = download(args[++i], args[++i]);
System.exit(ok ? 0 : 1);
} else usage("Option '-download' requires two parameters (URL and file)");
break;
case "-dryrun":
dryRun = true;
noRmOnExit = true; // Not running, so don't delete files
reportHtml = reportYaml = false; // Don't create reports
break;
case "-extractsource":
extractSource = true;
break;
case "-h":
case "-help":
case "--help":
usage(null);
break;
case "-i":
case "-info":
// Checkpoint info
if ((i + 1) < args.length) chekcpointRestoreFile = args[++i];
else usage("Option '-i' without checkpoint file argument");
bdsAction = BdsAction.INFO_CHECKPOINT;
break;
case "-l":
case "-log":
log = true;
break;
case "-nochp":
noCheckpoint = true;
break;
case "-noreport":
reportHtml = reportYaml = false;
break;
case "-noreporthtml":
reportHtml = false;
break;
case "-noreportyaml":
reportYaml = false;
break;
case "-normonexit":
noRmOnExit = true;
break;
case "-pid":
// PID file
if ((i + 1) < args.length) pidFile = args[++i];
else usage("Option '-pid' without file argument");
break;
case "-q":
case "-queue":
// Queue name
if ((i + 1) < args.length) queue = args[++i];
else usage("Option '-queue' without file argument");
break;
case "-quiet":
verbose = false;
debug = false;
quiet = true;
break;
case "-r":
case "-restore":
// Checkpoint restore
if ((i + 1) < args.length) chekcpointRestoreFile = args[++i];
else usage("Option '-r' without checkpoint file argument");
bdsAction = BdsAction.RUN_CHECKPOINT;
break;
case "-reporthtml":
reportHtml = true;
break;
case "-reportname":
if ((i + 1) < args.length) reportFileName = args[++i];
else usage("Option '-reportName' without name argument");
break;
case "-reportyaml":
case "-yaml":
reportYaml = true;
break;
case "-s":
case "-system":
// System type
if ((i + 1) < args.length) system = args[++i];
else usage("Option '-system' without file argument");
break;
case "-t":
case "-test":
bdsAction = BdsAction.TEST;
break;
case "-upload":
if ((i + 2) < args.length) {
config();
boolean ok = upload(args[++i], args[++i]);
System.exit(ok ? 0 : 1);
} else usage("Option '-upload' requires two parameters (file and URL)");
break;
case "-v":
case "-verbose":
verbose = true;
break;
case "-version":
System.out.println(VERSION);
System.exit(0);
break;
case "-wall":
warnUnusedFunctionsAnyFile = false;
break;
case "-y":
case "-retry":
// Number of retries
if ((i + 1) < args.length) taskFailCount = Gpr.parseIntSafe(args[++i]);
else usage("Option '-t' without number argument");
break;
default:
usage("Unknown command line option " + arg);
}
} else if (programFileName == null) programFileName = arg; // Get program file name
}
// Sanity checks
if (checkPidRegex) {
// OK: Nothing to check
} else if ((programFileName == null) && (chekcpointRestoreFile == null)) {
// No file name => Error
usage("Missing program file name.");
}
}
/**
* Run script
*/
public int run() {
// Initialize
Executioners executioners = Executioners.getInstance(config);
TaskDependecies.reset();
// Check PID regex
if (checkPidRegex) {
checkPidRegex();
return 0;
}
//---
// Run
//---
int exitValue = 0;
switch (bdsAction) {
case RUN_CHECKPOINT:
exitValue = runCheckpoint();
break;
case INFO_CHECKPOINT:
exitValue = infoCheckpoint();
break;
case TEST:
exitValue = runTests();
break;
default:
exitValue = runCompile(); // Compile & run
}
if (verbose) Timer.showStdErr("Finished. Exit code: " + exitValue);
//---
// Kill all executioners
//---
for (Executioner executioner : executioners.getAll())
executioner.kill();
config.kill(); // Kill 'tail' and 'monitor' threads
return exitValue;
}
/**
* Restore from checkpoint and run
*/
int runCheckpoint() {
// Load checkpoint file
BdsSerializer bdsSerializer = new BdsSerializer(chekcpointRestoreFile, config);
List<BdsThread> bdsThreads = bdsSerializer.load();
// Set main thread's programUnit running scope (mostly for debugging and test cases)
// ProgramUnit's scope is the one before 'global'
BdsThread mainThread = bdsThreads.get(0);
programUnit = mainThread.getProgramUnit();
// Set state and recover tasks
for (BdsThread bdsThread : bdsThreads) {
if (bdsThread.isFinished()) {
// Thread finished before serialization: Nothing to do
} else {
bdsThread.setRunState(RunState.CHECKPOINT_RECOVER); // Set run state to recovery
bdsThread.restoreUnserializedTasks(); // Re-execute or add tasks
}
}
// All set, run main thread
return runThread(mainThread);
}
/**
* Compile and run
*/
int runCompile() {
// Compile, abort on errors
if (verbose) Timer.showStdErr("Parsing");
if (!compile()) {
// Show errors and warnings, if any
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
return 1;
}
if (verbose) Timer.showStdErr("Initializing");
BdsParseArgs bdsParseArgs = new BdsParseArgs(this);
bdsParseArgs.setDebug(debug);
bdsParseArgs.parse();
// Run the program
BdsThread bdsThread = new BdsThread(programUnit, config);
if (verbose) Timer.showStdErr("Process ID: " + bdsThread.getBdsThreadId());
// Show script's automatic help message
if (bdsParseArgs.isShowHelp()) {
if (verbose) Timer.showStdErr("Showing automaic 'help'");
HelpCreator hc = new HelpCreator(programUnit);
System.out.println(hc);
return 0;
}
if (verbose) Timer.showStdErr("Running");
int exitCode = runThread(bdsThread);
// Check stack
if (stackCheck) bdsThread.sanityCheckStack();
return exitCode;
}
/**
* Compile and run
*/
int runTests() {
// Compile, abort on errors
if (verbose) Timer.showStdErr("Parsing");
if (!compile()) {
// Show errors and warnings, if any
if (!CompilerMessages.get().isEmpty()) System.err.println("Compiler messages:\n" + CompilerMessages.get());
return 1;
}
if (verbose) Timer.showStdErr("Initializing");
BdsParseArgs bdsParseArgs = new BdsParseArgs(this);
bdsParseArgs.setDebug(debug);
bdsParseArgs.parse();
// Run the program
BdsThread bdsThread = new BdsThread(programUnit, config);
if (verbose) Timer.showStdErr("Process ID: " + bdsThread.getBdsThreadId());
if (verbose) Timer.showStdErr("Running tests");
ProgramUnit pu = bdsThread.getProgramUnit();
return runTests(pu);
}
/**
* For each "test*()" function in ProgramUnit, create a thread
* that executes the function's body
*/
int runTests(ProgramUnit progUnit) {
// We need to execute all variable declarations in order to be able to use global variables in 'test*()' functions
List<VarDeclaration> varDecls = programUnit.varDeclarations(false);
List<FunctionDeclaration> testFuncs = progUnit.testsFunctions();
int exitCode = 0;
int testOk = 0, testError = 0;
for (FunctionDeclaration testFunc : testFuncs) {
System.out.println("");
// Run each function
int exitValTest = runTests(progUnit, testFunc, varDecls);
// Show test result
if (exitValTest == 0) {
Timer.show("Test '" + testFunc.getFunctionName() + "': OK");
testOk++;
} else {
Timer.show("Test '" + testFunc.getFunctionName() + "': FAIL");
exitCode = 1;
testError++;
}
}
// Show results
System.out.println("");
Timer.show("Totals"//
+ "\n OK : " + testOk //
+ "\n ERROR : " + testError //
);
return exitCode;
}
/**
* Run a single test function, return exit code
*/
int runTests(ProgramUnit progUnit, FunctionDeclaration testFunc, List<VarDeclaration> varDecls) {
List<Statement> statements = new ArrayList<>();
// Add all variable declarations
for (VarDeclaration varDecl : varDecls)
statements.add(varDecl);
// Note: We execute the function's body (not the function declaration)
statements.add(testFunc.getStatement());
// Create a program unit having all variable declarations and the test function's statements
ProgramUnit puTest = new ProgramUnit(progUnit, null);
puTest.setStatements(statements.toArray(new Statement[0]));
BdsThread bdsTestThread = new BdsThread(puTest, config);
int exitValTest = runThread(bdsTestThread);
return exitValTest;
}
/**
* Run a thread
*/
int runThread(BdsThread bdsThread) {
this.bdsThread = bdsThread;
if (bdsThread.isFinished()) return 0;
bdsThread.start();
try {
bdsThread.join();
} catch (InterruptedException e) {
// Nothing to do?
// May be checkpoint?
return 1;
}
// Check stack
if (stackCheck) bdsThread.sanityCheckStack();
// OK, we are done
return bdsThread.getExitValue();
}
public void setStackCheck(boolean stackCheck) {
this.stackCheck = stackCheck;
}
/**
* Upload a local file to a URL
* @return true if successful
*/
public boolean upload(String fileName, String url) {
Data remote = Data.factory(url);
Data local = Data.factory(fileName);
// Sanity checks
if (!remote.isRemote()) {
System.err.println("Cannot upload to non-remote URL: " + url);
return false;
}
if (!local.isFile()) {
System.err.println("Cannot upload non-file: " + fileName);
return false;
}
if (!local.exists()) {
System.err.println("Local file does not exists: " + fileName);
return false;
}
if (!local.canRead()) {
System.err.println("Cannot read local file : " + fileName);
return false;
}
// Already uploaded? Nothing to do
if (remote.isUploaded(fileName)) {
if (verbose) System.err.println("Remote file is up to date, no upload required: " + url);
return true;
}
return remote.upload(fileName);
}
void usage(String err) {
if (err != null) System.err.println("Error: " + err);
System.out.println(VERSION + "\n");
System.err.println("Usage: " + Bds.class.getSimpleName() + " [options] file.bds");
System.err.println("\nAvailable options: ");
System.err.println(" [-c | -config ] bds.config : Config file. Default : " + configFile);
System.err.println(" [-checkPidRegex] : Check configuration's 'pidRegex' by matching stdin.");
System.err.println(" [-d | -debug ] : Debug mode.");
System.err.println(" -download url file : Download 'url' to local 'file'. Note: Used by 'taks'");
// System.err.println(" -done : Use 'done' files: Default: " + useDoneFile);
System.err.println(" -dryRun : Do not run any task, just show what would be run. Default: " + dryRun);
System.err.println(" [-extractSource] : Extract source code files from checkpoint (only valid combined with '-info').");
System.err.println(" [-i | -info ] checkpoint.chp : Show state information in checkpoint file.");
System.err.println(" [-l | -log ] : Log all tasks (do not delete tmp files). Default: " + log);
System.err.println(" -noChp : Do not create any checkpoint files.");
System.err.println(" -noRmOnExit : Do not remove files marked for deletion on exit (rmOnExit). Default: " + noRmOnExit);
System.err.println(" [-q | -queue ] queueName : Set default queue name.");
System.err.println(" -quiet : Do not show any messages or tasks outputs on STDOUT. Default: " + quiet);
System.err.println(" -reportHtml : Create HTML report. Default: " + reportHtml);
System.err.println(" -reportName <name> : Set base-name for report files.");
System.err.println(" -reportYaml : Create YAML report. Default: " + reportYaml);
System.err.println(" [-r | -restore] checkpoint.chp : Restore state from checkpoint file.");
System.err.println(" [-s | -system ] type : Set system type.");
System.err.println(" [-t | -test ] : Run user test cases (runs all test* functions).");
System.err.println(" -upload file url : Upload local file to 'url'. Note: Used by 'taks'");
System.err.println(" [-v | -verbose] : Be verbose.");
System.err.println(" -version : Show version and exit.");
System.err.println(" -wall : Show all compile time warnings.");
System.err.println(" [-y | -retry ] num : Number of times to retry a failing tasks.");
System.err.println(" -pid <file> : Write local processes PIDs to 'file'");
if (err != null) System.exit(1);
System.exit(0);
}
}
|
Added compile time warning for 'unused functions'. Only warns for main module, but can be activated for all includes using '-wall'
|
src/org/bds/Bds.java
|
Added compile time warning for 'unused functions'. Only warns for main module, but can be activated for all includes using '-wall'
|
|
Java
|
apache-2.0
|
953f283bcec3dcbbdc1f03267e1ac32f3c7c78b7
| 0
|
xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read
|
//
// ========================================================================
// Copyright (c) 1995-2012 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.client;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.client.api.Connection;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.client.api.Response;
import org.eclipse.jetty.client.api.Result;
import org.eclipse.jetty.client.util.BytesContentProvider;
import org.eclipse.jetty.http.HttpMethod;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.eclipse.jetty.toolchain.test.annotation.Slow;
import org.eclipse.jetty.toolchain.test.annotation.Stress;
import org.eclipse.jetty.util.IO;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.junit.Assert;
import org.junit.Test;
public class HttpClientLoadTest extends AbstractHttpClientServerTest
{
private final Logger logger = Log.getLogger(HttpClientLoadTest.class);
public HttpClientLoadTest(SslContextFactory sslContextFactory)
{
super(sslContextFactory);
}
@Stress("High I/O, High CPU")
@Slow
@Test
public void testIterative() throws Exception
{
start(new LoadHandler());
client.setMaxConnectionsPerAddress(32768);
client.setMaxQueueSizePerAddress(1024 * 1024);
Random random = new Random();
int iterations = 1000;
CountDownLatch latch = new CountDownLatch(iterations);
List<String> failures = new ArrayList<>();
// Dumps the state of the client if the test takes too long
final Thread testThread = Thread.currentThread();
client.getScheduler().schedule(new Runnable()
{
@Override
public void run()
{
for (String host : Arrays.asList("localhost", "127.0.0.1"))
{
HttpDestination destination = (HttpDestination)client.getDestination(scheme, host, connector.getLocalPort());
for (Connection connection : new ArrayList<>(destination.getActiveConnections()))
{
HttpConnection active = (HttpConnection)connection;
logger.warn(active.getEndPoint() + " exchange " + active.getExchange());
}
}
testThread.interrupt();
}
}, iterations * ("http".equalsIgnoreCase(scheme) ? 10 : 500), TimeUnit.MILLISECONDS);
long begin = System.nanoTime();
for (int i = 0; i < iterations; ++i)
{
test(random, latch, failures);
}
Assert.assertTrue(latch.await(iterations, TimeUnit.SECONDS));
long end = System.nanoTime();
long elapsed = TimeUnit.NANOSECONDS.toMillis(end - begin);
logger.info("{} requests in {} ms, {} req/s", iterations, elapsed, elapsed > 0 ? iterations * 1000 / elapsed : -1);
Assert.assertTrue(failures.toString(), failures.isEmpty());
}
private void test(Random random, final CountDownLatch latch, final List<String> failures)
{
int maxContentLength = 64 * 1024;
// Choose a random destination
String host = random.nextBoolean() ? "localhost" : "127.0.0.1";
Request request = client.newRequest(host, connector.getLocalPort()).scheme(scheme);
// Choose a random method
HttpMethod method = random.nextBoolean() ? HttpMethod.GET : HttpMethod.POST;
request.method(method);
// Choose randomly whether to close the connection on the client or on the server
if (random.nextBoolean())
request.header("Connection", "close");
else if (random.nextBoolean())
request.header("X-Close", "true");
switch (method)
{
case GET:
// Randomly ask the server to download data upon this GET request
if (random.nextBoolean())
request.header("X-Download", String.valueOf(random.nextInt(maxContentLength) + 1));
break;
case POST:
int contentLength = random.nextInt(maxContentLength) + 1;
request.header("X-Upload", String.valueOf(contentLength));
request.content(new BytesContentProvider(new byte[contentLength]));
break;
}
request.send(new Response.Listener.Empty()
{
private final AtomicInteger contentLength = new AtomicInteger();
@Override
public void onHeaders(Response response)
{
String content = response.headers().get("X-Content");
if (content != null)
contentLength.set(Integer.parseInt(content));
}
@Override
public void onContent(Response response, ByteBuffer content)
{
contentLength.addAndGet(-content.remaining());
}
@Override
public void onComplete(Result result)
{
if (result.isFailed())
failures.add("Result failed " + result);
if (contentLength.get() != 0)
failures.add("Content length mismatch " + contentLength);
latch.countDown();
}
});
}
private class LoadHandler extends AbstractHandler
{
@Override
public void handle(String target, org.eclipse.jetty.server.Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
String method = request.getMethod().toUpperCase();
switch (method)
{
case "GET":
int contentLength = request.getIntHeader("X-Download");
if (contentLength > 0)
{
response.setHeader("X-Content", String.valueOf(contentLength));
response.getOutputStream().write(new byte[contentLength]);
}
break;
case "POST":
response.setHeader("X-Content", request.getHeader("X-Upload"));
IO.copy(request.getInputStream(), response.getOutputStream());
break;
}
if (Boolean.parseBoolean(request.getHeader("X-Close")))
response.setHeader("Connection", "close");
baseRequest.setHandled(true);
}
}
}
|
jetty-client/src/test/java/org/eclipse/jetty/client/HttpClientLoadTest.java
|
//
// ========================================================================
// Copyright (c) 1995-2012 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.client;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.client.api.Connection;
import org.eclipse.jetty.client.api.Request;
import org.eclipse.jetty.client.api.Response;
import org.eclipse.jetty.client.api.Result;
import org.eclipse.jetty.client.util.BytesContentProvider;
import org.eclipse.jetty.http.HttpMethod;
import org.eclipse.jetty.server.handler.AbstractHandler;
import org.eclipse.jetty.toolchain.test.annotation.Slow;
import org.eclipse.jetty.toolchain.test.annotation.Stress;
import org.eclipse.jetty.util.IO;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.junit.Assert;
import org.junit.Test;
public class HttpClientLoadTest extends AbstractHttpClientServerTest
{
private final Logger logger = Log.getLogger(HttpClientLoadTest.class);
public HttpClientLoadTest(SslContextFactory sslContextFactory)
{
super(sslContextFactory);
}
@Stress("High I/O, High CPU")
@Slow
@Test
public void testIterative() throws Exception
{
start(new LoadHandler());
client.setMaxConnectionsPerAddress(32768);
client.setMaxQueueSizePerAddress(1024 * 1024);
Random random = new Random();
int iterations = 1000;
CountDownLatch latch = new CountDownLatch(iterations);
List<String> failures = new ArrayList<>();
// Dumps the state of the client if the test takes too long
final Thread testThread = Thread.currentThread();
client.getScheduler().schedule(new Runnable()
{
@Override
public void run()
{
for (String host : Arrays.asList("localhost", "127.0.0.1"))
{
HttpDestination destination = (HttpDestination)client.getDestination(scheme, host, connector.getLocalPort());
for (Connection connection : new ArrayList<>(destination.getActiveConnections()))
{
HttpConnection active = (HttpConnection)connection;
logger.warn(active.getEndPoint() + " exchange " + active.getExchange());
}
}
testThread.interrupt();
}
}, iterations * ("http".equalsIgnoreCase(scheme) ? 10 : 200), TimeUnit.MILLISECONDS);
long begin = System.nanoTime();
for (int i = 0; i < iterations; ++i)
{
test(random, latch, failures);
}
Assert.assertTrue(latch.await(iterations, TimeUnit.SECONDS));
long end = System.nanoTime();
long elapsed = TimeUnit.NANOSECONDS.toMillis(end - begin);
logger.info("{} requests in {} ms, {} req/s", iterations, elapsed, elapsed > 0 ? iterations * 1000 / elapsed : -1);
Assert.assertTrue(failures.toString(), failures.isEmpty());
}
private void test(Random random, final CountDownLatch latch, final List<String> failures)
{
int maxContentLength = 64 * 1024;
// Choose a random destination
String host = random.nextBoolean() ? "localhost" : "127.0.0.1";
Request request = client.newRequest(host, connector.getLocalPort()).scheme(scheme);
// Choose a random method
HttpMethod method = random.nextBoolean() ? HttpMethod.GET : HttpMethod.POST;
request.method(method);
// Choose randomly whether to close the connection on the client or on the server
if (random.nextBoolean())
request.header("Connection", "close");
else if (random.nextBoolean())
request.header("X-Close", "true");
switch (method)
{
case GET:
// Randomly ask the server to download data upon this GET request
if (random.nextBoolean())
request.header("X-Download", String.valueOf(random.nextInt(maxContentLength) + 1));
break;
case POST:
int contentLength = random.nextInt(maxContentLength) + 1;
request.header("X-Upload", String.valueOf(contentLength));
request.content(new BytesContentProvider(new byte[contentLength]));
break;
}
request.send(new Response.Listener.Empty()
{
private final AtomicInteger contentLength = new AtomicInteger();
@Override
public void onHeaders(Response response)
{
String content = response.headers().get("X-Content");
if (content != null)
contentLength.set(Integer.parseInt(content));
}
@Override
public void onContent(Response response, ByteBuffer content)
{
contentLength.addAndGet(-content.remaining());
}
@Override
public void onComplete(Result result)
{
if (result.isFailed())
failures.add("Result failed " + result);
if (contentLength.get() != 0)
failures.add("Content length mismatch " + contentLength);
latch.countDown();
}
});
}
private class LoadHandler extends AbstractHandler
{
@Override
public void handle(String target, org.eclipse.jetty.server.Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
String method = request.getMethod().toUpperCase();
switch (method)
{
case "GET":
int contentLength = request.getIntHeader("X-Download");
if (contentLength > 0)
{
response.setHeader("X-Content", String.valueOf(contentLength));
response.getOutputStream().write(new byte[contentLength]);
}
break;
case "POST":
response.setHeader("X-Content", request.getHeader("X-Upload"));
IO.copy(request.getInputStream(), response.getOutputStream());
break;
}
if (Boolean.parseBoolean(request.getHeader("X-Close")))
response.setHeader("Connection", "close");
baseRequest.setHandled(true);
}
}
}
|
jetty-9 - HTTP client: make SSL load test wait longer for completion.
|
jetty-client/src/test/java/org/eclipse/jetty/client/HttpClientLoadTest.java
|
jetty-9 - HTTP client: make SSL load test wait longer for completion.
|
|
Java
|
apache-2.0
|
a9fcdf1ff997c1d709e9076d1aa4a865e134a72e
| 0
|
wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,wyona/yanel
|
/*
* Copyright 2007 Wyona
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.wyona.org/licenses/APACHE-LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wyona.yanel.core;
import java.io.File;
import java.lang.ClassNotFoundException;
import java.lang.IllegalAccessException;
import java.lang.InstantiationException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Properties;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.FalseFileFilter;
import org.apache.commons.io.filefilter.WildcardFilter;
import org.apache.log4j.Category;
import org.wyona.commons.io.FileUtil;
import org.wyona.yanel.core.map.Map;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.core.map.RealmManager;
/**
*
*/
public class ResourceTypeRegistry {
private static Category log = Category.getInstance(ResourceTypeRegistry.class);
public static String CONFIGURATION_FILE = Yanel.DEFAULT_CONFIGURATION_FILE;
public static String RESOURCE_DEFAULT_CONFIG_NAME = "resource.xml";
private URL propertiesURL;
private File configFile;
private File resourceTypeConfigFile;
java.util.HashMap hm = new java.util.HashMap();
/**
*
*/
public ResourceTypeRegistry() {
this(Yanel.DEFAULT_CONFIGURATION_FILE_XML);
}
/**
*
*/
public ResourceTypeRegistry(String configurationFile) {
CONFIGURATION_FILE = configurationFile;
if (RealmManager.class.getClassLoader().getResource(CONFIGURATION_FILE) == null) {
CONFIGURATION_FILE = Yanel.DEFAULT_CONFIGURATION_FILE;
}
if (ResourceTypeRegistry.class.getClassLoader().getResource(CONFIGURATION_FILE) != null) {
if (CONFIGURATION_FILE.endsWith(".xml")) {
try {
// It seems like one can also use URI instead of URLDecoder in order to avoid the Windows issue if a path contains spaces
URI configFileUri = new URI(RealmManager.class.getClassLoader().getResource(CONFIGURATION_FILE).toString());
configFile = new File(configFileUri.getPath());
} catch (Exception e) {
log.error("Failure while reading configuration: " + e.getMessage(), e);
}
try {
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
Configuration config;
config = builder.buildFromFile(configFile);
resourceTypeConfigFile = new File(config.getChild("resource-types-config").getAttribute("src"));
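// Resolve the resource-types configuration file relative to the main config file when the path is not absolute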
if (!resourceTypeConfigFile.isAbsolute()) {
resourceTypeConfigFile = FileUtil.file(configFile.getParentFile().getAbsolutePath(), resourceTypeConfigFile.toString());
}
log.debug("Realms Configuration: " + resourceTypeConfigFile);
readResourceTypes();
} catch (Exception e) {
String errorMsg = "Failure while reading configuration: " + e.getMessage();
log.error(errorMsg, e);
}
} else if (CONFIGURATION_FILE.endsWith("properties")) {
log.warn("DEPRECATED: " + CONFIGURATION_FILE);
propertiesURL = ResourceTypeRegistry.class.getClassLoader().getResource(CONFIGURATION_FILE);
Properties props = new Properties();
try {
props.load(propertiesURL.openStream());
// use URLDecoder to avoid problems when the filename contains spaces, see
// http://bugzilla.wyona.com/cgi-bin/bugzilla/show_bug.cgi?id=5165
File propsFile = new File(URLDecoder.decode(propertiesURL.getFile()));
String separator = ",";
String[] tokens = props.getProperty("resources").split(separator);
for (int i = 0; i < tokens.length; i++) {
File resConfigFile = new File(tokens[i]);
if (!resConfigFile.isAbsolute()) {
resConfigFile = FileUtil.file(propsFile.getParentFile().getAbsolutePath(), tokens[i]);
}
if (resConfigFile.isDirectory()) {
resConfigFile = new File(resConfigFile, RESOURCE_DEFAULT_CONFIG_NAME);
}
if (resConfigFile.isFile()) {
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resConfigFile);
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
hm.put(rtd.getResourceTypeUniversalName(), rtd);
} else {
log.error("No such file or directory: " + resConfigFile);
}
}
} catch (Exception e) {
log.error(e);
}
} else {
log.error(CONFIGURATION_FILE + "have to be either .xml or .properties");
}
} else {
log.error("No such config file" + CONFIGURATION_FILE);
}
}
/**
* Read all resource type definitions listed in the resource-types configuration file.
*/
public void readResourceTypes() throws ConfigurationException {
try {
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
Configuration config;
config = builder.buildFromFile(resourceTypeConfigFile);
Configuration resourceTypes[] = config.getChildren("resource-type");
for (int i = 0; i < resourceTypes.length; i++) {
try {
String packageName = resourceTypes[i].getAttribute("package");
log.debug("Package: " + packageName);
// TODO: Wildcard: resource*.xml !?
// TODO: Config itself, e.g. org/wyona/yanel/impl/resources/redirect/my-resource.xml
URL resourceURL = ResourceTypeRegistry.class.getClassLoader().getResource(packageName.replace('.','/') + "/resource.xml");
log.info("Resource config URL: " + resourceURL);
try {
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resourceURL.openStream());
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
hm.put(rtd.getResourceTypeUniversalName(), rtd);
} catch (Exception exception) {
log.error("Exception re registring resource with package: " + packageName);
log.error(exception.getMessage(), exception);
}
} catch (Exception e) {
File resConfigFile = new File(resourceTypes[i].getAttribute("src"));
if (!resConfigFile.isAbsolute()) {
resConfigFile = FileUtil.file(resourceTypeConfigFile.getParentFile().getAbsolutePath(), resourceTypes[i].getAttribute("src"));
}
if (resConfigFile.isDirectory()) {
File resDir = resConfigFile;
Iterator iter = FileUtils.listFiles(resDir, new WildcardFilter("resource*.xml"), null).iterator();
while (iter.hasNext()) {
resConfigFile = (File)iter.next();
log.debug("found resource config: " + resConfigFile);
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resConfigFile);
if (log.isDebugEnabled()) {
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
}
hm.put(rtd.getResourceTypeUniversalName(), rtd);
}
} else if (resConfigFile.isFile()) {
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resConfigFile);
if (log.isDebugEnabled()) {
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
}
hm.put(rtd.getResourceTypeUniversalName(), rtd);
} else {
log.error("No such file or directory: " + resConfigFile);
}
}
}
} catch (Exception e) {
String errorMsg = "Failure while reading configuration: " + e.getMessage();
log.error(errorMsg, e);
throw new ConfigurationException(errorMsg, e);
}
}
/**
* Get the resource type definition for the given universal name.
*/
public ResourceTypeDefinition getResourceTypeDefinition(String universalName) throws Exception {
if (!hm.containsKey(universalName)) {
throw new Exception("Unknown resource type: " + universalName);
}
return (ResourceTypeDefinition) hm.get(universalName);
}
/**
* Get all registered resource type definitions.
*/
public ResourceTypeDefinition[] getResourceTypeDefinitions() {
java.util.Set keys = hm.keySet();
java.util.Iterator keysIterator = keys.iterator();
ResourceTypeDefinition[] rtds = new ResourceTypeDefinition[keys.size()];
int i = 0;
while (keysIterator.hasNext()) {
String universalName = (String) keysIterator.next();
rtds[i] = (ResourceTypeDefinition)hm.get(universalName);
i++;
}
return rtds;
}
/**
* @deprecated
*/
public Resource newResource(String universalName) throws ClassNotFoundException, InstantiationException, IllegalAccessException {
ResourceTypeDefinition rtd = (ResourceTypeDefinition) hm.get(universalName);
if (rtd != null) {
Resource resource = (Resource) Class.forName(rtd.getResourceTypeClassname()).newInstance();
resource.setRTD(rtd);
// TODO: Set Yanel instance ... but Yanel should be a singleton, because it instantiates the Map ... see Cmdl and Servlet ...
//resource.setYanel(...);
return resource;
} else {
log.error("No resource registered for rti: " + universalName);
return null;
}
}
/**
*
*/
public String getConfigurationFile() {
return CONFIGURATION_FILE;
}
}
|
src/core/java/org/wyona/yanel/core/ResourceTypeRegistry.java
|
/*
* Copyright 2007 Wyona
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.wyona.org/licenses/APACHE-LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wyona.yanel.core;
import java.io.File;
import java.lang.ClassNotFoundException;
import java.lang.IllegalAccessException;
import java.lang.InstantiationException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Properties;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.filefilter.FalseFileFilter;
import org.apache.commons.io.filefilter.WildcardFilter;
import org.apache.log4j.Category;
import org.wyona.commons.io.FileUtil;
import org.wyona.yanel.core.map.Map;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.core.map.RealmManager;
/**
* Registry of the available resource types.
*/
public class ResourceTypeRegistry {
private static Category log = Category.getInstance(ResourceTypeRegistry.class);
public static String CONFIGURATION_FILE = Yanel.DEFAULT_CONFIGURATION_FILE;
public static String RESOURCE_DEFAULT_CONFIG_NAME = "resource.xml";
private URL propertiesURL;
private File configFile;
private File resourceTypeConfigFile;
java.util.HashMap hm = new java.util.HashMap();
/**
*
*/
public ResourceTypeRegistry() {
this(Yanel.DEFAULT_CONFIGURATION_FILE_XML);
}
/**
*
*/
public ResourceTypeRegistry(String configurationFile) {
CONFIGURATION_FILE = configurationFile;
if (RealmManager.class.getClassLoader().getResource(CONFIGURATION_FILE) == null) {
CONFIGURATION_FILE = Yanel.DEFAULT_CONFIGURATION_FILE;
}
if (ResourceTypeRegistry.class.getClassLoader().getResource(CONFIGURATION_FILE) != null) {
if (CONFIGURATION_FILE.endsWith(".xml")) {
try {
// It seems like one can also use URI instead of URLDecoder in order to avoid the Windows issue if a path contains spaces
URI configFileUri = new URI(RealmManager.class.getClassLoader().getResource(CONFIGURATION_FILE).toString());
configFile = new File(configFileUri.getPath());
} catch (Exception e) {
log.error("Failure while reading configuration: " + e.getMessage(), e);
}
try {
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
Configuration config;
config = builder.buildFromFile(configFile);
resourceTypeConfigFile = new File(config.getChild("resource-types-config").getAttribute("src"));
if (!resourceTypeConfigFile.isAbsolute()) {
resourceTypeConfigFile = FileUtil.file(configFile.getParentFile().getAbsolutePath(), resourceTypeConfigFile.toString());
}
log.debug("Realms Configuration: " + resourceTypeConfigFile);
readResourceTypes();
} catch (Exception e) {
String errorMsg = "Failure while reading configuration: " + e.getMessage();
log.error(errorMsg, e);
}
} else if (CONFIGURATION_FILE.endsWith("properties")) {
log.warn("DEPRECATED: " + CONFIGURATION_FILE);
propertiesURL = ResourceTypeRegistry.class.getClassLoader().getResource(CONFIGURATION_FILE);
Properties props = new Properties();
try {
props.load(propertiesURL.openStream());
// use URLDecoder to avoid problems when the filename contains spaces, see
// http://bugzilla.wyona.com/cgi-bin/bugzilla/show_bug.cgi?id=5165
File propsFile = new File(URLDecoder.decode(propertiesURL.getFile()));
String separator = ",";
String[] tokens = props.getProperty("resources").split(separator);
for (int i = 0; i < tokens.length; i++) {
File resConfigFile = new File(tokens[i]);
if (!resConfigFile.isAbsolute()) {
resConfigFile = FileUtil.file(propsFile.getParentFile().getAbsolutePath(), tokens[i]);
}
if (resConfigFile.isDirectory()) {
resConfigFile = new File(resConfigFile, RESOURCE_DEFAULT_CONFIG_NAME);
}
if (resConfigFile.isFile()) {
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resConfigFile);
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
hm.put(rtd.getResourceTypeUniversalName(), rtd);
} else {
log.error("No such file or directory: " + resConfigFile);
}
}
} catch (Exception e) {
log.error(e);
}
} else {
log.error(CONFIGURATION_FILE + "have to be either .xml or .properties");
}
} else {
log.error("No such config file" + CONFIGURATION_FILE);
}
}
/**
* Read the resource type definitions referenced by the resource types configuration file.
*/
public void readResourceTypes() throws ConfigurationException {
try {
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
Configuration config;
config = builder.buildFromFile(resourceTypeConfigFile);
Configuration resourceTypes[] = config.getChildren("resource-type");
for (int i = 0; i < resourceTypes.length; i++) {
try {
File resConfigFile = new File(resourceTypes[i].getAttribute("src"));
if (!resConfigFile.isAbsolute()) {
resConfigFile = FileUtil.file(resourceTypeConfigFile.getParentFile().getAbsolutePath(), resourceTypes[i].getAttribute("src"));
}
if (resConfigFile.isDirectory()) {
File resDir = resConfigFile;
Iterator iter = FileUtils.listFiles(resDir, new WildcardFilter("resource*.xml"), null).iterator();
while (iter.hasNext()) {
resConfigFile = (File)iter.next();
log.debug("found resource config: " + resConfigFile);
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resConfigFile);
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
hm.put(rtd.getResourceTypeUniversalName(), rtd);
}
} else if (resConfigFile.isFile()) {
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resConfigFile);
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
hm.put(rtd.getResourceTypeUniversalName(), rtd);
} else {
log.error("No such file or directory: " + resConfigFile);
}
} catch (Exception e) {
String packageName = resourceTypes[i].getAttribute("package");
log.debug("Package: " + packageName);
// TODO: Wildcard: resource*.xml !?
// TODO: Config itself, e.g. org/wyona/yanel/impl/resources/redirect/my-resource.xml
URL resourceURL = ResourceTypeRegistry.class.getClassLoader().getResource(packageName.replace('.','/') + "/resource.xml");
log.info("Resource config URL: " + resourceURL);
try {
ResourceTypeDefinition rtd = new ResourceTypeDefinition(resourceURL.openStream());
log.debug("Universal Name: " + rtd.getResourceTypeUniversalName());
log.debug("Classname: " + rtd.getResourceTypeClassname());
hm.put(rtd.getResourceTypeUniversalName(), rtd);
} catch (Exception exception) {
log.error("Exception re registring resource with package: " + packageName);
log.error(exception.getMessage(), exception);
}
}
}
} catch (Exception e) {
String errorMsg = "Failure while reading configuration: " + e.getMessage();
log.error(errorMsg, e);
throw new ConfigurationException(errorMsg, e);
}
}
/**
* Get the resource type definition for a given universal name.
*/
public ResourceTypeDefinition getResourceTypeDefinition(String universalName) throws Exception {
if (!hm.containsKey(universalName)) {
throw new Exception("Unknown resource type: " + universalName);
}
return (ResourceTypeDefinition) hm.get(universalName);
}
/**
* Get all registered resource type definitions.
*/
public ResourceTypeDefinition[] getResourceTypeDefinitions() {
java.util.Set keys = hm.keySet();
java.util.Iterator keysIterator = keys.iterator();
ResourceTypeDefinition[] rtds = new ResourceTypeDefinition[keys.size()];
int i = 0;
while (keysIterator.hasNext()) {
String universalName = (String) keysIterator.next();
rtds[i] = (ResourceTypeDefinition)hm.get(universalName);
i++;
}
return rtds;
}
/**
* @deprecated
*/
public Resource newResource(String universalName) throws ClassNotFoundException, InstantiationException, IllegalAccessException {
ResourceTypeDefinition rtd = (ResourceTypeDefinition) hm.get(universalName);
if (rtd != null) {
Resource resource = (Resource) Class.forName(rtd.getResourceTypeClassname()).newInstance();
resource.setRTD(rtd);
// TODO: Set Yanel instance ... but Yanel should be a singleton, because it instantiates the Map ... see Cmdl and Servlet ...
//resource.setYanel(...);
return resource;
} else {
log.error("No resource registered for rti: " + universalName);
return null;
}
}
/**
* Get the configuration file name.
*/
public String getConfigurationFile() {
return CONFIGURATION_FILE;
}
}
|
check package first
|
src/core/java/org/wyona/yanel/core/ResourceTypeRegistry.java
|
check package first
|
|
Java
|
apache-2.0
|
3f9edecadd04687a5625b622a76f95d98a19d056
| 0
|
tkowalcz/rx-java-pantha-rhei
|
package pl.tkowalcz.twitter.mock;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.List;
import com.google.common.collect.ImmutableList;
import pl.tkowalcz.twitter.TwitterUser;
import rx.Observable;
import rx.schedulers.Schedulers;
public class MockRetroTwitter {
private final List<TwitterUser> users;
public MockRetroTwitter() {
ImmutableList.Builder<TwitterUser> builder = ImmutableList.builder();
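// Deserialize ten pre-recorded TwitterUser objects from users.bin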
try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream("src/main/resources/users.bin"))) {
for (int i = 0; i < 10; i++) {
builder.add((TwitterUser) ois.readObject());
}
} catch (IOException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
users = builder.build();
}
public Observable<List<TwitterUser>> searchUsers(String prefix) {
return Observable.just(users).skip(1).subscribeOn(Schedulers.newThread());
}
}
|
src/main/java/pl/tkowalcz/twitter/mock/MockRetroTwitter.java
|
package pl.tkowalcz.twitter.mock;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.List;
import com.google.common.collect.ImmutableList;
import pl.tkowalcz.twitter.TwitterUser;
import rx.Observable;
import rx.schedulers.Schedulers;
public class MockRetroTwitter {
private final List<TwitterUser> users;
public MockRetroTwitter() {
ImmutableList.Builder<TwitterUser> builder = ImmutableList.builder();
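// Deserialize ten pre-recorded TwitterUser objects from users.bin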
try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream("src/main/resources/users.bin"))) {
for (int i = 0; i < 10; i++) {
builder.add((TwitterUser) ois.readObject());
}
} catch (IOException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
users = builder.build();
}
public Observable<List<TwitterUser>> searchUsers(String prefix) {
return Observable.just(users).subscribeOn(Schedulers.newThread());
}
}
|
Fixes for mock.
|
src/main/java/pl/tkowalcz/twitter/mock/MockRetroTwitter.java
|
Fixes for mock.
|
|
Java
|
apache-2.0
|
aefe97e709e45579513a8ec10b37da057e23e8cb
| 0
|
pponec/ujorm,pponec/ujorm,pponec/ujorm
|
/*
* Copyright 2009 Paul Ponec
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ujoframework.orm;
import java.util.Collections;
import java.util.List;
import org.ujoframework.Ujo;
import org.ujoframework.UjoProperty;
import org.ujoframework.extensions.ListUjoProperty;
import org.ujoframework.extensions.UjoAction;
import org.ujoframework.implementation.quick.QuickUjo;
/**
* Abstract Metamodel
* @author Pavel Ponec
*/
abstract public class AbstractMetaModel extends QuickUjo {
/** Read-only state */
private boolean readOnly = false;
/** Property values can only be read */
public boolean readOnly() {
return readOnly;
}
/** Set a read-only state. */
@SuppressWarnings("unchecked")
public void setReadOnly(boolean recurse) {
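// Wrap all list properties as unmodifiable lists, then lock this object (and optionally its child metamodels)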
if (readOnly) return;
for (UjoProperty p : readProperties()) {
if (p instanceof ListUjoProperty) {
final List list = (List) p.of(this);
p.setValue(this, list!=null
? Collections.unmodifiableList(list)
: Collections.EMPTY_LIST
);
}
}
this.readOnly = true; // <<<<<< LOCK THE OBJECT !!!
if (recurse) for (UjoProperty p : readProperties()) {
Object value = p.getValue(this);
if (value instanceof AbstractMetaModel) {
((AbstractMetaModel) value).setReadOnly(recurse);
}
else if (p instanceof ListUjoProperty) {
if ( AbstractMetaModel.class.isAssignableFrom( ((ListUjoProperty)p).getItemType())) {
for (AbstractMetaModel m : ((ListUjoProperty<AbstractMetaModel,AbstractMetaModel>)p).getList(this) ) {
m.setReadOnly(recurse);
}
}
}
}
}
/** Test a read-only state */
public boolean testReadOnly(final boolean exception) throws UnsupportedOperationException {
if (readOnly && exception) {
throw new UnsupportedOperationException("Object have got a read-only state");
}
return readOnly;
}
@Override
public void writeValue(final UjoProperty property, final Object value) {
this.testReadOnly(true);
super.writeValue(property, value);
}
/** Assign a 'valid value' over a default UJO property value only */
protected <UJO extends Ujo, VALUE> void changeDefault
( final UJO ujo
, final UjoProperty<UJO, VALUE> property
, final VALUE value
) {
if (property.isDefault(ujo) && isUsable(value)) {
property.setValue(ujo, value);
}
}
@Override
@SuppressWarnings("unchecked")
public boolean readAuthorization(UjoAction action, UjoProperty property, Object value) {
if (action.getType()==UjoAction.ACTION_XML_EXPORT) {
return !property.isDefault(this);
}
return super.readAuthorization(action, property, value);
}
/** Returns true, if the argument text is not null and not empty. */
protected boolean isUsable(final CharSequence text) {
final boolean result = text!=null && text.length()>0;
return result;
}
/** Returns true if the argument value is not null (and, in case of a CharSequence, not empty). */
protected boolean isUsable(final Object value) {
final boolean result = value instanceof CharSequence
? isUsable((CharSequence)value)
: value!=null
;
return result;
}
/** Getter based on one UjoProperty */
@SuppressWarnings("unchecked")
public <UJO extends AbstractMetaModel, VALUE> VALUE get ( UjoProperty<UJO, VALUE> property) {
return property.of((UJO) this);
}
}
|
ujo-core/src-orm/org/ujoframework/orm/AbstractMetaModel.java
|
/*
* Copyright 2009 Paul Ponec
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ujoframework.orm;
import java.util.Collections;
import java.util.List;
import org.ujoframework.Ujo;
import org.ujoframework.UjoProperty;
import org.ujoframework.extensions.ListUjoProperty;
import org.ujoframework.extensions.UjoAction;
import org.ujoframework.implementation.quick.QuickUjo;
/**
* Abstract Metamodel
* @author Pavel Ponec
*/
abstract public class AbstractMetaModel extends QuickUjo {
/** Read-only state */
private boolean readOnly = false;
/** Property values can only be read */
public boolean readOnly() {
return readOnly;
}
/** Set a read-only state. */
@SuppressWarnings("unchecked")
public void setReadOnly(boolean recurse) {
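// Wrap all list properties as unmodifiable lists, then lock this object (and optionally its child metamodels)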
if (readOnly) return;
for (UjoProperty p : readProperties()) {
if (p instanceof ListUjoProperty) {
final List list = (List) p.of(this);
p.setValue(this, list!=null
? Collections.unmodifiableList(list)
: Collections.EMPTY_LIST
);
}
}
this.readOnly = true; // <<<<<< LOCK THE OBJECT !!!
if (recurse) for (UjoProperty p : readProperties()) {
Object value = p.getValue(this);
if (value instanceof AbstractMetaModel) {
((AbstractMetaModel) value).setReadOnly(recurse);
}
else if (p instanceof ListUjoProperty) {
if ( AbstractMetaModel.class.isAssignableFrom( ((ListUjoProperty)p).getItemType())) {
for (AbstractMetaModel m : ((ListUjoProperty<AbstractMetaModel,AbstractMetaModel>)p).getList(this) ) {
m.setReadOnly(recurse);
}
}
}
}
}
@Override
public void writeValue(final UjoProperty property, final Object value) {
if (readOnly) {
throw new UnsupportedOperationException("Objec have got read-only state");
}
super.writeValue(property, value);
}
/** Assign a 'valid value' over a default UJO property value only */
protected <UJO extends Ujo, VALUE> void changeDefault
( final UJO ujo
, final UjoProperty<UJO, VALUE> property
, final VALUE value
) {
if (property.isDefault(ujo) && isUsable(value)) {
property.setValue(ujo, value);
}
}
@Override
@SuppressWarnings("unchecked")
public boolean readAuthorization(UjoAction action, UjoProperty property, Object value) {
if (action.getType()==UjoAction.ACTION_XML_EXPORT) {
return !property.isDefault(this);
}
return super.readAuthorization(action, property, value);
}
/** Returns true, if the argument text is not null and not empty. */
protected boolean isUsable(final CharSequence text) {
final boolean result = text!=null && text.length()>0;
return result;
}
/** Returns true if the argument value is not null (and, in case of a CharSequence, not empty). */
protected boolean isUsable(final Object value) {
final boolean result = value instanceof CharSequence
? isUsable((CharSequence)value)
: value!=null
;
return result;
}
/** Getter based on one UjoProperty */
@SuppressWarnings("unchecked")
public <UJO extends AbstractMetaModel, VALUE> VALUE get ( UjoProperty<UJO, VALUE> property) {
return property.of((UJO) this);
}
}
|
Meta-model initialization changes (II)
|
ujo-core/src-orm/org/ujoframework/orm/AbstractMetaModel.java
|
Meta-model initialization changes (II)
|
|
Java
|
apache-2.0
|
debd0147393d849614d6d8142d2e5d8fa6f30d94
| 0
|
hurricup/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,xfournet/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,ernestp/consulo,lucafavatella/intellij-community,FHannes/intellij-community,signed/intellij-community,kdwink/intellij-community,signed/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,jexp/idea2,vladmm/intellij-community,consulo/consulo,samthor/intellij-community,amith01994/intellij-community,adedayo/intellij-community,kool79/intellij-community,adedayo/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,allotria/intellij-community,xfournet/intellij-community,vladmm/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,ernestp/consulo,jexp/idea2,blademainer/intellij-community,hurricup/intellij-community,adedayo/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,semonte/intellij-community,petteyg/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,izonder/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,izonder/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,supersven/intellij-community,apixandru/intellij-community,izonder/intellij-community,allotria/intellij-community,allotria/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,orekyuu/intellij-community,nicolargo/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,semonte/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,diorcety/intellij-community,signed/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,fnouama/intellij-community,robovm/robovm-studio,ryano144/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,blademainer/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,clumsy/intellij-community,xfournet/intellij-community,consulo/consulo,Lekanich/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,supersven/intellij-community,samthor/intellij-community,pwoodworth/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,supersven/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,petteyg/intellij-communit
y,alphafoobar/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,supersven/intellij-community,slisson/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,youdonghai/intellij-community,samthor/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,slisson/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,allotria/intellij-community,adedayo/intellij-community,fitermay/intellij-community,allotria/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,ryano144/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,da1z/intellij-community,Lekanich/intellij-community,michaelgallacher/intellij-community,consulo/consulo,semonte/intellij-community,idea4bsd/idea4bsd,MichaelNedzelsky/intellij-community,holmes/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,vladmm/intellij-community,da1z/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,dslomov/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,kool79/intellij-community,apixandru/intellij-community,ftomassetti/intellij-community,hurricup/intellij-community,joewalnes/idea-community,jagguli/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,FHannes/intellij-community,da1z/intellij-community,blademainer/intellij-community,vladmm/intellij-community,retomerz/intellij-community,blademainer/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,consulo/consulo,apixandru/intellij-community,joewalnes/idea-community,ahb0327/intellij-community,jexp/idea2,dslomov/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,holmes/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,retomerz/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,Lekanich/intellij
-community,da1z/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,ernestp/consulo,clumsy/intellij-community,wreckJ/intellij-community,caot/intellij-community,orekyuu/intellij-community,vladmm/intellij-community,FHannes/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,signed/intellij-community,wreckJ/intellij-community,supersven/intellij-community,asedunov/intellij-community,caot/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,ibinti/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,da1z/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,ibinti/intellij-community,ryano144/intellij-community,ernestp/consulo,fengbaicanhe/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,holmes/intellij-community,izonder/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,da1z/intellij-community,caot/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,kool79/intellij-community,vladmm/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,slisson/intellij-community,suncycheng/intellij-community,diorcety/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,caot/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,izonder/intellij-community,signed/intellij-community,da1z/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,petteyg/intellij-community,da1z/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,signed/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,caot/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,retomerz/intellij-community,amith01994/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,Lekanich/intellij-community,holmes/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,signed/intellij-community,adedayo/intellij-c
ommunity,amith01994/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,jexp/idea2,ryano144/intellij-community,vladmm/intellij-community,petteyg/intellij-community,dslomov/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,caot/intellij-community,allotria/intellij-community,vladmm/intellij-community,caot/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,clumsy/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,izonder/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,slisson/intellij-community,petteyg/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,caot/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,consulo/consulo,MER-GROUP/intellij-community,asedunov/intellij-community,clumsy/intellij-community,diorcety/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,joewalnes/idea-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,dslomov/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,fnouama/intellij-community,holmes/intellij-community,retomerz/intellij-community,jagguli/intellij-community,retomerz/intellij-community,petteyg/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,signed/intellij-community,kdwink/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,ernestp/consulo,akosyakov/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,petteyg/intellij-community,retomerz/intellij-community,asedunov/intellij-community,kool79/intellij-community,joewalnes/idea-community,pwoodworth/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,da1z/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,ivan-fedorov/intellij-community,ftoma
ssetti/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,jagguli/intellij-community,allotria/intellij-community,slisson/intellij-community,ryano144/intellij-community,fnouama/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,kool79/intellij-community,fitermay/intellij-community,ibinti/intellij-community,jexp/idea2,TangHao1987/intellij-community,TangHao1987/intellij-community,jexp/idea2,supersven/intellij-community,vvv1559/intellij-community,slisson/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,Lekanich/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,petteyg/intellij-community,holmes/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,fitermay/intellij-community,hurricup/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,MichaelNedzelsky/intellij-community,jexp/idea2,pwoodworth/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,jagguli/intellij-community,kdwink/intellij-community,apixandru/intellij-community,izonder/intellij-community,wreckJ/intellij-community,slisson/intellij-community,semonte/intellij-community,dslomov/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,amith01994/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,signed/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,blademainer/intellij-community,petteyg/intellij-community,jagguli/intellij-community,fnouama/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,tmpgit/intellij-community,holmes/intellij-community,adedayo/intellij-community,semonte/intellij-community,clumsy/intellij-community,asedunov/intellij-community,robovm/robovm-studio,kool79/intellij-community,adedayo/intellij-community,ibinti/intellij-community,asedunov/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,da1z/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,amith01994/intellij-community,consulo/consulo,r
etomerz/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,kool79/intellij-community,diorcety/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,retomerz/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,caot/intellij-community,samthor/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,izonder/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,jexp/idea2,joewalnes/idea-community,jagguli/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,apixandru/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,semonte/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,robovm/robovm-studio,petteyg/intellij-community,signed/intellij-community,jagguli/intellij-community,joewalnes/idea-community,ibinti/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,ryano144/intellij-community,clumsy/intellij-community,robovm/robovm-studio,fitermay/intellij-community,samthor/intellij-community,caot/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,supersven/intellij-community,joewalnes/idea-community,pwoodworth/intellij-community,adedayo/intellij-community,hurricup/intellij-community,allotria/intellij-community,apixandru/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,ahb0327/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,orekyuu/intellij-community,izonder/intellij-community,Lekanich/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,joewalnes/idea-community,ibinti/intellij-community,apixandru/intellij-community,fnouama/intellij-community,hurricup/intellij-community,samthor/intellij-community,tmpgit/intellij-community,kool79/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,alphafoobar/intellij-community,suncycheng/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,samthor/intellij-community,muntasirsyed/in
tellij-community,nicolargo/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,FHannes/intellij-community,amith01994/intellij-community,signed/intellij-community,supersven/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,fitermay/intellij-community,kdwink/intellij-community,semonte/intellij-community
|
package com.intellij.execution.testframework.sm.runner;
import com.intellij.execution.Location;
import com.intellij.execution.testframework.*;
import com.intellij.execution.testframework.sm.LocationProviderUtil;
import com.intellij.execution.testframework.sm.runner.states.*;
import com.intellij.execution.testframework.sm.runner.ui.TestsPresentationUtil;
import com.intellij.execution.testframework.ui.PrintableTestProxy;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.ide.util.EditSourceUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.pom.Navigatable;
import com.intellij.testIntegration.TestLocationProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author Roman Chernyatchik
*/
public class SMTestProxy extends CompositePrintable implements PrintableTestProxy {
private static final Logger LOG = Logger.getInstance(SMTestProxy.class.getName());
private List<SMTestProxy> myChildren;
private SMTestProxy myParent;
private AbstractState myState = NotRunState.getInstance();
private final String myName;
private Integer myDuration = null; // duration is unknown
@Nullable private final String myLocationUrl;
private boolean myDurationIsCached = false; // is used for separating unknown and unset duration
private Printer myPrinter = Printer.DEAF;
private final boolean myIsSuite;
public SMTestProxy(final String testName, final boolean isSuite,
@Nullable final String locationUrl) {
myName = testName;
myIsSuite = isSuite;
myLocationUrl = locationUrl;
}
public boolean isInProgress() {
//final SMTestProxy parent = getParent();
return myState.isInProgress();
}
public boolean isDefect() {
return myState.isDefect();
}
public boolean shouldRun() {
return true;
}
public int getMagnitude() {
// Used by some test filters
//WARN: This is a hack, see PoolOfTestStates; a proper API is necessary
return getMagnitudeInfo().getValue();
}
public TestStateInfo.Magnitude getMagnitudeInfo() {
return myState.getMagnitude();
}
public boolean isLeaf() {
return myChildren == null || myChildren.isEmpty();
}
public boolean isPassed() {
return myState.getMagnitude() == TestStateInfo.Magnitude.SKIPPED_INDEX ||
myState.getMagnitude() == TestStateInfo.Magnitude.COMPLETE_INDEX ||
myState.getMagnitude() == TestStateInfo.Magnitude.PASSED_INDEX;
}
public void addChild(final SMTestProxy child) {
if (myChildren == null) {
myChildren = new ArrayList<SMTestProxy>();
}
myChildren.add(child);
// add printable
//
// add link to child's future output in correct place
// actually if after this suite will obtain output
// it will place it after this child and before future child
addLast(child);
// add child
//
//TODO reset children cache
child.setParent(this);
// if the parent is being printed then all of the child's output
// should also be sent to the same printer
if (myPrinter != Printer.DEAF) {
child.setPrintLinstener(myPrinter);
}
}
public String getName() {
return myName;
}
@Nullable
public Location getLocation(final Project project) {
//determines location of test proxy
//TODO multiresolve support
if (myLocationUrl == null) {
return null;
}
final String protocolId = LocationProviderUtil.extractProtocol(myLocationUrl);
final String path = LocationProviderUtil.extractPath(myLocationUrl);
if (protocolId != null && path != null) {
for (TestLocationProvider provider : Extensions.getExtensions(TestLocationProvider.EP_NAME)) {
final List<Location> locations = provider.getLocation(protocolId, path, project);
if (!locations.isEmpty()) {
return locations.iterator().next();
}
}
}
return null;
}
@Nullable
public Navigatable getDescriptor(final Location location) {
// by location gets navigatable element.
// It can be file or place in file (e.g. when OPEN_FAILURE_LINE is enabled)
if (location != null) {
return EditSourceUtil.getDescriptor(location.getPsiElement());
}
return null;
}
public boolean isSuite() {
return myIsSuite;
}
public SMTestProxy getParent() {
return myParent;
}
public List<? extends SMTestProxy> getChildren() {
return myChildren != null ? myChildren : Collections.<SMTestProxy>emptyList();
}
public List<SMTestProxy> getAllTests() {
final List<SMTestProxy> allTests = new ArrayList<SMTestProxy>();
allTests.add(this);
for (SMTestProxy child : getChildren()) {
allTests.addAll(child.getAllTests());
}
return allTests;
}
public void setStarted() {
myState = !myIsSuite ? TestInProgressState.TEST : new SuiteInProgressState(this);
}
/**
* Calculates and caches duration of test or suite
* @return null if duration is unknown, otherwise duration value in milliseconds;
*/
@Nullable
public Integer getDuration() {
// Returns duration value for tests
// or cached duration for suites
if (myDurationIsCached || !isSuite()) {
return myDuration;
}
//For suites this counts and caches the durations of its children. It also evaluates a partial duration,
//i.e. if a child's duration is unknown it is ignored in the summary value.
//If the duration of all children is unknown the summary duration is also unknown;
//if one of the children is ignored its duration will be 0, and if a child wasn't run
//its duration will be unknown
myDuration = calcSuiteDuration();
myDurationIsCached = true;
return myDuration;
}
/**
* Sets duration of test
* @param duration In milliseconds
*/
public void setDuration(final int duration) {
invalidateCachedDurationForContainerSuites();
if (!isSuite()) {
myDurationIsCached = true;
myDuration = (duration >= 0) ? duration : null;
return;
}
// Don't allow setting the duration of suites directly.
// It should be the sum of the children. This requirement exists only
// for the safety of the current model and may be changed
LOG.warn("Unsupported operation");
}
public void setFinished() {
if (myState.isFinal()) {
// we shouldn't fire a new printable because the final state
// has already been fired
return;
}
if (!isSuite()) {
// if isn't in other finished state (ignored, failed or passed)
myState = TestPassedState.INSTACE;
} else {
//Test Suite
myState = determineSuiteStateOnFinished();
}
// prints final state additional info
fireOnNewPrintable(myState);
}
public void setTestFailed(@NotNull final String localizedMessage,
@NotNull final String stackTrace, final boolean testError) {
myState = testError
? new TestErrorState(localizedMessage, stackTrace)
: new TestFailedState(localizedMessage, stackTrace);
fireOnNewPrintable(myState);
}
public void setTestIgnored(@NotNull final String ignoreComment,
@Nullable final String stackTrace) {
myState = new TestIgnoredState(ignoreComment, stackTrace);
fireOnNewPrintable(myState);
}
public void setParent(@Nullable final SMTestProxy parent) {
myParent = parent;
}
public List<? extends SMTestProxy> getChildren(@Nullable final Filter filter) {
final List<? extends SMTestProxy> allChildren = getChildren();
if (filter == Filter.NO_FILTER || filter == null) {
return allChildren;
}
final List<SMTestProxy> selectedChildren = new ArrayList<SMTestProxy>();
for (SMTestProxy child : allChildren) {
if (filter.shouldAccept(child)) {
selectedChildren.add(child);
}
}
if ((selectedChildren.isEmpty())) {
return Collections.<SMTestProxy>emptyList();
}
return selectedChildren;
}
public boolean wasLaunched() {
return myState.wasLaunched();
}
public boolean isRoot() {
return getParent() == null;
}
public void setPrintLinstener(final Printer printer) {
myPrinter = printer;
if (myChildren == null) {
return;
}
for (ChangingPrintable child : myChildren) {
child.setPrintLinstener(printer);
}
}
/**
* Prints this proxy and all its children on given printer
* @param printer Printer
*/
public void printOn(final Printer printer) {
super.printOn(printer);
//Tests State, that provide and formats additional output
myState.printOn(printer);
}
/**
* Stores printable information in internal buffer and notifies
* proxy's printer about new text available
* @param printable Printable info
*/
@Override
public void addLast(final Printable printable) {
super.addLast(printable);
fireOnNewPrintable(printable);
}
public void addStdOutput(final String output, final Key outputType) {
addLast(new Printable() {
public void printOn(final Printer printer) {
printer.print(output, ConsoleViewContentType.getConsoleViewType(outputType));
}
});
}
public void addStdErr(final String output) {
addLast(new Printable() {
public void printOn(final Printer printer) {
printer.print(output, ConsoleViewContentType.ERROR_OUTPUT);
}
});
}
public void addSystemOutput(final String output) {
addLast(new Printable() {
public void printOn(final Printer printer) {
printer.print(output, ConsoleViewContentType.SYSTEM_OUTPUT);
}
});
}
private void fireOnNewPrintable(final Printable printable) {
myPrinter.onNewAvailable(printable);
}
@NotNull
public String getPresentableName() {
return TestsPresentationUtil.getPresentableName(this);
}
@Override
public String toString() {
return getPresentableName();
}
/**
* Process was terminated
*/
public void setTerminated() {
if (myState.isFinal()) {
return;
}
myState = TerminatedState.INSTANCE;
final List<? extends SMTestProxy> children = getChildren();
for (SMTestProxy child : children) {
child.setTerminated();
}
fireOnNewPrintable(myState);
}
public boolean wasTerminated() {
return myState.wasTerminated();
}
@Nullable
protected String getLocationUrl() {
return myLocationUrl;
}
/**
* Check if suite contains error tests or suites
* @return True if contains
*/
private boolean containsErrorTests() {
final List<? extends SMTestProxy> children = getChildren();
for (SMTestProxy child : children) {
if (child.getMagnitudeInfo() == TestStateInfo.Magnitude.ERROR_INDEX) {
return true;
}
}
return false;
}
private boolean containsFailedTests() {
final List<? extends SMTestProxy> children = getChildren();
for (SMTestProxy child : children) {
if (child.getMagnitudeInfo() == TestStateInfo.Magnitude.FAILED_INDEX) {
return true;
}
}
return false;
}
/**
* Determines the suite state after it has finished
* @return New state
*/
private AbstractState determineSuiteStateOnFinished() {
final AbstractState state;
if (isLeaf()) {
state = SuiteFinishedState.EMPTY_SUITE;
} else {
if (isDefect()) {
// A test suite contains errors if at least one of its tests contains an error
if (containsErrorTests()) {
state = SuiteFinishedState.ERROR_SUITE;
} else {
// if the suite contains failed tests the whole suite should be
// considered failed
state = containsFailedTests()
? SuiteFinishedState.FAILED_SUITE
: SuiteFinishedState.WITH_IGNORED_TESTS_SUITE;
}
} else {
state = SuiteFinishedState.PASSED_SUITE;
}
}
return state;
}
@Nullable
private Integer calcSuiteDuration() {
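// Sum the known durations of all children; return null if no child reports a duration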
int partialDuration = 0;
boolean durationOfChildrenIsUnknown = true;
for (SMTestProxy child : getChildren()) {
final Integer duration = child.getDuration();
if (duration != null) {
durationOfChildrenIsUnknown = false;
partialDuration += duration.intValue();
}
}
// Convert the partial duration into an Integer object; if the duration of
// all children is unknown the summary duration is also unknown
return durationOfChildrenIsUnknown ? null : partialDuration;
}
/**
* Recursively invalidates cached duration for container(parent) suites
*/
private void invalidateCachedDurationForContainerSuites() {
// Invalidates duration of this suite
myDuration = null;
myDurationIsCached = false;
// Invalidates duration of container suite
final SMTestProxy containerSuite = getParent();
if (containerSuite != null) {
containerSuite.invalidateCachedDurationForContainerSuites();
}
}
}
|
smRunner/src/com/intellij/execution/testframework/sm/runner/SMTestProxy.java
|
package com.intellij.execution.testframework.sm.runner;
import com.intellij.execution.Location;
import com.intellij.execution.testframework.*;
import com.intellij.execution.testframework.sm.LocationProviderUtil;
import com.intellij.execution.testframework.sm.runner.states.*;
import com.intellij.execution.testframework.sm.runner.ui.TestsPresentationUtil;
import com.intellij.execution.testframework.ui.PrintableTestProxy;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.ide.util.EditSourceUtil;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.pom.Navigatable;
import com.intellij.testIntegration.TestLocationProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author Roman Chernyatchik
*/
public class SMTestProxy extends CompositePrintable implements PrintableTestProxy {
private static final Logger LOG = Logger.getInstance(SMTestProxy.class.getName());
private List<SMTestProxy> myChildren;
private SMTestProxy myParent;
private AbstractState myState = NotRunState.getInstance();
private final String myName;
private Integer myDuration = null; // duration is unknown
@Nullable private final String myLocationUrl;
private boolean myDurationIsCached = false; // is used for separating unknown and unset duration
private Printer myPrinter = Printer.DEAF;
private final boolean myIsSuite;
public SMTestProxy(final String testName, final boolean isSuite,
@Nullable final String locationUrl) {
myName = testName;
myIsSuite = isSuite;
myLocationUrl = locationUrl;
}
public boolean isInProgress() {
//final SMTestProxy parent = getParent();
return myState.isInProgress();
}
public boolean isDefect() {
return myState.isDefect();
}
public boolean shouldRun() {
return true;
}
public int getMagnitude() {
// Used by some test filters
//WARN: This is a hack, see PoolOfTestStates; a proper API is necessary
return getMagnitudeInfo().getValue();
}
public TestStateInfo.Magnitude getMagnitudeInfo() {
return myState.getMagnitude();
}
public boolean isLeaf() {
return myChildren == null || myChildren.isEmpty();
}
public boolean isPassed() {
return myState.getMagnitude() == TestStateInfo.Magnitude.SKIPPED_INDEX || myState.getMagnitude() != TestStateInfo.Magnitude.COMPLETE_INDEX;
}
public void addChild(final SMTestProxy child) {
if (myChildren == null) {
myChildren = new ArrayList<SMTestProxy>();
}
myChildren.add(child);
// add printable
//
// add link to child's future output in correct place
// actually if after this suite will obtain output
// it will place it after this child and before future child
addLast(child);
// add child
//
//TODO reset children cache
child.setParent(this);
// if the parent is being printed then all of the child's output
// should also be sent to the same printer
if (myPrinter != Printer.DEAF) {
child.setPrintLinstener(myPrinter);
}
}
public String getName() {
return myName;
}
@Nullable
public Location getLocation(final Project project) {
//determines location of test proxy
//TODO multiresolve support
if (myLocationUrl == null) {
return null;
}
final String protocolId = LocationProviderUtil.extractProtocol(myLocationUrl);
final String path = LocationProviderUtil.extractPath(myLocationUrl);
if (protocolId != null && path != null) {
for (TestLocationProvider provider : Extensions.getExtensions(TestLocationProvider.EP_NAME)) {
final List<Location> locations = provider.getLocation(protocolId, path, project);
if (!locations.isEmpty()) {
return locations.iterator().next();
}
}
}
return null;
}
@Nullable
public Navigatable getDescriptor(final Location location) {
// by location gets navigatable element.
// It can be file or place in file (e.g. when OPEN_FAILURE_LINE is enabled)
if (location != null) {
return EditSourceUtil.getDescriptor(location.getPsiElement());
}
return null;
}
public boolean isSuite() {
return myIsSuite;
}
public SMTestProxy getParent() {
return myParent;
}
public List<? extends SMTestProxy> getChildren() {
return myChildren != null ? myChildren : Collections.<SMTestProxy>emptyList();
}
public List<SMTestProxy> getAllTests() {
final List<SMTestProxy> allTests = new ArrayList<SMTestProxy>();
allTests.add(this);
for (SMTestProxy child : getChildren()) {
allTests.addAll(child.getAllTests());
}
return allTests;
}
public void setStarted() {
myState = !myIsSuite ? TestInProgressState.TEST : new SuiteInProgressState(this);
}
/**
* Calculates and caches duration of test or suite
* @return null if duration is unknown, otherwise duration value in milliseconds;
*/
@Nullable
public Integer getDuration() {
// Returns duration value for tests
// or cached duration for suites
if (myDurationIsCached || !isSuite()) {
return myDuration;
}
//For suites this counts and caches the durations of its children. It also evaluates a partial duration,
//i.e. if a child's duration is unknown it is ignored in the summary value.
//If the duration of all children is unknown the summary duration is also unknown;
//if one of the children is ignored its duration will be 0, and if a child wasn't run
//its duration will be unknown
myDuration = calcSuiteDuration();
myDurationIsCached = true;
return myDuration;
}
/**
* Sets duration of test
* @param duration In milliseconds
*/
public void setDuration(final int duration) {
invalidateCachedDurationForContainerSuites();
if (!isSuite()) {
myDurationIsCached = true;
myDuration = (duration >= 0) ? duration : null;
return;
}
// Don't allow setting the duration of suites directly.
// It should be the sum of the children. This requirement exists only
// for the safety of the current model and may be changed
LOG.warn("Unsupported operation");
}
public void setFinished() {
if (myState.isFinal()) {
// we shouldn't fire a new printable because the final state
// has already been fired
return;
}
if (!isSuite()) {
// if it isn't in another finished state (ignored, failed or passed)
myState = TestPassedState.INSTACE;
} else {
//Test Suite
myState = determineSuiteStateOnFinished();
}
// prints final state additional info
fireOnNewPrintable(myState);
}
public void setTestFailed(@NotNull final String localizedMessage,
@NotNull final String stackTrace, final boolean testError) {
myState = testError
? new TestErrorState(localizedMessage, stackTrace)
: new TestFailedState(localizedMessage, stackTrace);
fireOnNewPrintable(myState);
}
public void setTestIgnored(@NotNull final String ignoreComment,
@Nullable final String stackTrace) {
myState = new TestIgnoredState(ignoreComment, stackTrace);
fireOnNewPrintable(myState);
}
public void setParent(@Nullable final SMTestProxy parent) {
myParent = parent;
}
public List<? extends SMTestProxy> getChildren(@Nullable final Filter filter) {
final List<? extends SMTestProxy> allChildren = getChildren();
if (filter == Filter.NO_FILTER || filter == null) {
return allChildren;
}
final List<SMTestProxy> selectedChildren = new ArrayList<SMTestProxy>();
for (SMTestProxy child : allChildren) {
if (filter.shouldAccept(child)) {
selectedChildren.add(child);
}
}
if ((selectedChildren.isEmpty())) {
return Collections.<SMTestProxy>emptyList();
}
return selectedChildren;
}
public boolean wasLaunched() {
return myState.wasLaunched();
}
public boolean isRoot() {
return getParent() == null;
}
public void setPrintLinstener(final Printer printer) {
myPrinter = printer;
if (myChildren == null) {
return;
}
for (ChangingPrintable child : myChildren) {
child.setPrintLinstener(printer);
}
}
/**
* Prints this proxy and all its children on given printer
* @param printer Printer
*/
public void printOn(final Printer printer) {
super.printOn(printer);
//Test state, which provides and formats additional output
myState.printOn(printer);
}
/**
* Stores printable information in the internal buffer and notifies the
* proxy's printer that new text is available
* @param printable Printable info
*/
@Override
public void addLast(final Printable printable) {
super.addLast(printable);
fireOnNewPrintable(printable);
}
public void addStdOutput(final String output, final Key outputType) {
addLast(new Printable() {
public void printOn(final Printer printer) {
printer.print(output, ConsoleViewContentType.getConsoleViewType(outputType));
}
});
}
public void addStdErr(final String output) {
addLast(new Printable() {
public void printOn(final Printer printer) {
printer.print(output, ConsoleViewContentType.ERROR_OUTPUT);
}
});
}
public void addSystemOutput(final String output) {
addLast(new Printable() {
public void printOn(final Printer printer) {
printer.print(output, ConsoleViewContentType.SYSTEM_OUTPUT);
}
});
}
private void fireOnNewPrintable(final Printable printable) {
myPrinter.onNewAvailable(printable);
}
@NotNull
public String getPresentableName() {
return TestsPresentationUtil.getPresentableName(this);
}
@Override
public String toString() {
return getPresentableName();
}
/**
* Process was terminated
*/
public void setTerminated() {
if (myState.isFinal()) {
return;
}
myState = TerminatedState.INSTANCE;
final List<? extends SMTestProxy> children = getChildren();
for (SMTestProxy child : children) {
child.setTerminated();
}
fireOnNewPrintable(myState);
}
public boolean wasTerminated() {
return myState.wasTerminated();
}
@Nullable
protected String getLocationUrl() {
return myLocationUrl;
}
/**
* Checks whether this suite contains error tests or suites
* @return true if it does
*/
private boolean containsErrorTests() {
final List<? extends SMTestProxy> children = getChildren();
for (SMTestProxy child : children) {
if (child.getMagnitudeInfo() == TestStateInfo.Magnitude.ERROR_INDEX) {
return true;
}
}
return false;
}
private boolean containsFailedTests() {
final List<? extends SMTestProxy> children = getChildren();
for (SMTestProxy child : children) {
if (child.getMagnitudeInfo() == TestStateInfo.Magnitude.FAILED_INDEX) {
return true;
}
}
return false;
}
/**
* Determines the suite state after it has finished
* @return New state
*/
private AbstractState determineSuiteStateOnFinished() {
final AbstractState state;
if (isLeaf()) {
state = SuiteFinishedState.EMPTY_SUITE;
} else {
if (isDefect()) {
// A test suite contains errors if at least one of its tests contains an error
if (containsErrorTests()) {
state = SuiteFinishedState.ERROR_SUITE;
} else {
// if the suite contains failed tests, the whole suite should be
// considered failed
state = containsFailedTests()
? SuiteFinishedState.FAILED_SUITE
: SuiteFinishedState.WITH_IGNORED_TESTS_SUITE;
}
} else {
state = SuiteFinishedState.PASSED_SUITE;
}
}
return state;
}
@Nullable
private Integer calcSuiteDuration() {
int partialDuration = 0;
boolean durationOfChildrenIsUnknown = true;
for (SMTestProxy child : getChildren()) {
final Integer duration = child.getDuration();
if (duration != null) {
durationOfChildrenIsUnknown = false;
partialDuration += duration.intValue();
}
}
// Convert the partial duration to an Integer object. If the duration of every
// child is unknown, the summary duration is also unknown (null)
return durationOfChildrenIsUnknown ? null : partialDuration;
}
/**
* Recursively invalidates cached duration for container(parent) suites
*/
private void invalidateCachedDurationForContainerSuites() {
// Invalidates duration of this suite
myDuration = null;
myDurationIsCached = false;
// Invalidates duration of container suite
final SMTestProxy containerSuite = getParent();
if (containerSuite != null) {
containerSuite.invalidateCachedDurationForContainerSuites();
}
}
}
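The duration handling above is a lazy compute-and-cache pattern: a leaf test stores its own duration, a suite sums its children's durations on the first request and caches the result, and setting a leaf's duration walks up the parent chain to invalidate every cached sum. The following is a minimal self-contained sketch of that pattern only; the class and field names are illustrative and are not the IntelliJ types used above.

import java.util.ArrayList;
import java.util.List;

// Sketch of lazy duration caching: leaves know their own duration, suites sum
// their children's durations on demand and cache the result, and updating a
// leaf invalidates the cached sums of all of its ancestors.
class DurationNode {
    private final List<DurationNode> children = new ArrayList<>();
    private DurationNode parent;
    private Integer duration;          // known duration for leaves, cached sum for suites
    private boolean durationIsCached;

    void addChild(DurationNode child) {
        children.add(child);
        child.parent = this;
    }

    void setDuration(int millis) {
        invalidateAncestors();
        duration = millis >= 0 ? millis : null;
        durationIsCached = true;
    }

    Integer getDuration() {
        if (durationIsCached || children.isEmpty()) {
            return duration;
        }
        Integer sum = null;
        for (DurationNode child : children) {
            Integer d = child.getDuration();
            if (d != null) {
                sum = (sum == null ? 0 : sum) + d; // children with unknown duration are skipped
            }
        }
        duration = sum;
        durationIsCached = true;
        return duration;
    }

    private void invalidateAncestors() {
        duration = null;
        durationIsCached = false;
        if (parent != null) {
            parent.invalidateAncestors();
        }
    }
}

As in calcSuiteDuration() above, getDuration() returns null for a suite when none of its children has a known duration.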
|
fix SMTestRunnerResultsFormTest.testRuby_1767
|
smRunner/src/com/intellij/execution/testframework/sm/runner/SMTestProxy.java
|
fix SMTestRunnerResultsFormTest.testRuby_1767
|
|
Java
|
apache-2.0
|
7ee47691a1551f4161742f285c145bf2928c5952
| 0
|
redhatanalytics/dstream-amqp,redhatanalytics/dstream-amqp
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.amqp;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.*;
import java.util.Map.Entry;
/**
* Java test suite for the AMQP input stream
*/
public class JavaAMQPBrokerStreamSuite {
private Duration batchDuration = new Duration(1000);
private String master = "local[2]";
private String appName = this.getClass().getSimpleName();
private String address = "my_address";
private String checkpointDir = "/tmp/spark-streaming-amqp-tests";
private SparkConf conf = null;
private JavaStreamingContext jssc = null;
private AMQPTestUtils amqpTestUtils = null;
@Before
public void setup() {
this.conf = new SparkConf().setMaster(this.master).setAppName(this.appName);
conf.set("spark.streaming.receiver.writeAheadLog.enable", "true");
this.jssc = new JavaStreamingContext(this.conf, this.batchDuration);
this.jssc.checkpoint(checkpointDir);
this.amqpTestUtils = new AMQPTestUtils();
this.amqpTestUtils.setup();
this.amqpTestUtils.startBroker();
}
@After
public void teardown() {
this.amqpTestUtils.stopBroker();
if (this.jssc != null) {
this.jssc.stop();
}
if (this.amqpTestUtils != null) {
this.amqpTestUtils.teardown();
}
}
@Test
public void testAMQPReceiveSimpleBodyString() {
Function converter = new JavaAMQPBodyFunction<String>();
String sendMessage = "Spark Streaming & AMQP";
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
List<String> receivedMessage = new ArrayList<>();
receiveStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendSimpleMessage(address, sendMessage);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(sendMessage));
jssc.stop();
}
@Test
public void testAMQPReceiveListBody() {
Function converter = new JavaAMQPJsonFunction();
List<Object> list = new ArrayList<>();
list.add("a string");
list.add(1);
list.add(2);
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> listStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
List<String> listFinal = new ArrayList<>();
// get an itarator on "section" that is actually an array
Iterator<JsonNode> iterator = mapper.readTree(jsonMsg).get("body").get("section").elements();
while(iterator.hasNext()) {
listFinal.add(iterator.next().asText());
}
return StringUtils.join(listFinal, ',');
});
List<String> receivedMessage = new ArrayList<>();
listStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendComplexMessage(address, list);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(StringUtils.join(list, ',')));
jssc.stop();
}
@Test
public void testAMQPReceiveMapBody() {
Function converter = new JavaAMQPJsonFunction();
Map<Object, Object> map = new HashMap<>();
map.put("field_a", "a string");
map.put("field_b", 1);
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> mapStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
List<String> listFinal = new ArrayList<>();
// get an itarator on all fields of "section" that is actually a map
Iterator<Entry<String, JsonNode>> iterator = mapper.readTree(jsonMsg).get("body").get("section").fields();
while(iterator.hasNext()) {
Entry<String, JsonNode> entry = iterator.next();
listFinal.add(entry.getKey() + "=" + entry.getValue().asText());
}
return StringUtils.join(listFinal, ',');
});
List<String> receivedMessage = new ArrayList<>();
mapStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendComplexMessage(address, map);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
StringBuilder sbuilder = new StringBuilder();
for (Entry<Object, Object> entry: map.entrySet()) {
sbuilder.append(entry.getKey() + "=" + entry.getValue() + ",");
}
sbuilder.deleteCharAt(sbuilder.length() - 1);
assert(receivedMessage.get(0).equals(sbuilder.toString()));
jssc.stop();
}
@Test
public void testAMQPReceiveArrayBody() {
Function converter = new JavaAMQPJsonFunction();
Object[] array = { 1, 2 };
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> listStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
List<String> listFinal = new ArrayList<>();
// get an itarator on "section" that is actually an array
Iterator<JsonNode> iterator = mapper.readTree(jsonMsg).get("body").get("section").elements();
while(iterator.hasNext()) {
listFinal.add(iterator.next().asText());
}
return StringUtils.join(listFinal, ',');
});
List<String> receivedMessage = new ArrayList<>();
listStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendComplexMessage(address, array);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(StringUtils.join(array, ',')));
jssc.stop();
}
@Test
public void testAMQPReceiveBinaryBody() {
Function converter = new JavaAMQPJsonFunction();
String sendMessage = "Spark Streaming & AMQP";
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> binaryStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
String body = new String(Base64.getDecoder().decode(mapper.readTree(jsonMsg).get("body").get("section").asText()));
return body;
});
List<String> receivedMessage = new ArrayList<>();
binaryStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendBinaryMessage(address, sendMessage.getBytes());
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(sendMessage));
jssc.stop();
}
}
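These tests wait for the receiver with a fixed Thread.sleep before asserting, which is exactly what the accompanying commit increases from 2000 ms to 5000 ms. A less timing-sensitive alternative, sketched here in plain Java with an illustrative AwaitUtil helper that is not part of the suite above, is to poll the shared list until a message arrives or a deadline passes.

import java.util.List;
import java.util.concurrent.TimeUnit;

// Minimal polling helper: waits until the receiving list is non-empty or the
// timeout elapses, instead of sleeping for a fixed amount of time.
final class AwaitUtil {
    private AwaitUtil() {}

    static boolean awaitNonEmpty(List<?> received, long timeoutMillis) throws InterruptedException {
        long deadline = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis);
        while (System.nanoTime() < deadline) {
            if (!received.isEmpty()) {
                return true;
            }
            Thread.sleep(100); // re-check periodically until the deadline
        }
        return !received.isEmpty();
    }
}

A test could then call something like AwaitUtil.awaitNonEmpty(receivedMessage, 10000) before reading receivedMessage.get(0). Note that the ArrayList used above is not thread-safe, so a synchronized or concurrent list would be the safer choice if the tests were reworked this way.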
|
src/test/java/org/apache/spark/streaming/amqp/JavaAMQPBrokerStreamSuite.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.amqp;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.lang3.StringUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.*;
import java.util.Map.Entry;
/**
* Java test suite for the AMQP input stream
*/
public class JavaAMQPBrokerStreamSuite {
private Duration batchDuration = new Duration(1000);
private String master = "local[2]";
private String appName = this.getClass().getSimpleName();
private String address = "my_address";
private String checkpointDir = "/tmp/spark-streaming-amqp-tests";
private SparkConf conf = null;
private JavaStreamingContext jssc = null;
private AMQPTestUtils amqpTestUtils = null;
@Before
public void setup() {
this.conf = new SparkConf().setMaster(this.master).setAppName(this.appName);
conf.set("spark.streaming.receiver.writeAheadLog.enable", "true");
this.jssc = new JavaStreamingContext(this.conf, this.batchDuration);
this.jssc.checkpoint(checkpointDir);
this.amqpTestUtils = new AMQPTestUtils();
this.amqpTestUtils.setup();
this.amqpTestUtils.startBroker();
}
@After
public void teardown() {
this.amqpTestUtils.stopBroker();
if (this.jssc != null) {
this.jssc.stop();
}
if (this.amqpTestUtils != null) {
this.amqpTestUtils.teardown();
}
}
@Test
public void testAMQPReceiveSimpleBodyString() {
Function converter = new JavaAMQPBodyFunction<String>();
String sendMessage = "Spark Streaming & AMQP";
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
List<String> receivedMessage = new ArrayList<>();
receiveStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendSimpleMessage(address, sendMessage);
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(sendMessage));
jssc.stop();
}
@Test
public void testAMQPReceiveListBody() {
Function converter = new JavaAMQPJsonFunction();
List<Object> list = new ArrayList<>();
list.add("a string");
list.add(1);
list.add(2);
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> listStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
List<String> listFinal = new ArrayList<>();
// get an itarator on "section" that is actually an array
Iterator<JsonNode> iterator = mapper.readTree(jsonMsg).get("body").get("section").elements();
while(iterator.hasNext()) {
listFinal.add(iterator.next().asText());
}
return StringUtils.join(listFinal, ',');
});
List<String> receivedMessage = new ArrayList<>();
listStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendComplexMessage(address, list);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(StringUtils.join(list, ',')));
jssc.stop();
}
@Test
public void testAMQPReceiveMapBody() {
Function converter = new JavaAMQPJsonFunction();
Map<Object, Object> map = new HashMap<>();
map.put("field_a", "a string");
map.put("field_b", 1);
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> mapStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
List<String> listFinal = new ArrayList<>();
// get an itarator on all fields of "section" that is actually a map
Iterator<Entry<String, JsonNode>> iterator = mapper.readTree(jsonMsg).get("body").get("section").fields();
while(iterator.hasNext()) {
Entry<String, JsonNode> entry = iterator.next();
listFinal.add(entry.getKey() + "=" + entry.getValue().asText());
}
return StringUtils.join(listFinal, ',');
});
List<String> receivedMessage = new ArrayList<>();
mapStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendComplexMessage(address, map);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
StringBuilder sbuilder = new StringBuilder();
for (Entry<Object, Object> entry: map.entrySet()) {
sbuilder.append(entry.getKey() + "=" + entry.getValue() + ",");
}
sbuilder.deleteCharAt(sbuilder.length() - 1);
assert(receivedMessage.get(0).equals(sbuilder.toString()));
jssc.stop();
}
@Test
public void testAMQPReceiveArrayBody() {
Function converter = new JavaAMQPJsonFunction();
Object[] array = { 1, 2 };
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> listStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
List<String> listFinal = new ArrayList<>();
// get an itarator on "section" that is actually an array
Iterator<JsonNode> iterator = mapper.readTree(jsonMsg).get("body").get("section").elements();
while(iterator.hasNext()) {
listFinal.add(iterator.next().asText());
}
return StringUtils.join(listFinal, ',');
});
List<String> receivedMessage = new ArrayList<>();
listStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendComplexMessage(address, array);
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(StringUtils.join(array, ',')));
jssc.stop();
}
@Test
public void testAMQPReceiveBinaryBody() {
Function converter = new JavaAMQPJsonFunction();
String sendMessage = "Spark Streaming & AMQP";
JavaReceiverInputDStream<String> receiveStream =
AMQPUtils.createStream(this.jssc,
this.amqpTestUtils.host(),
this.amqpTestUtils.port(),
this.address, converter, StorageLevel.MEMORY_ONLY());
JavaDStream<String> binaryStream = receiveStream.map(jsonMsg -> {
ObjectMapper mapper = new ObjectMapper();
String body = new String(Base64.getDecoder().decode(mapper.readTree(jsonMsg).get("body").get("section").asText()));
return body;
});
List<String> receivedMessage = new ArrayList<>();
binaryStream.foreachRDD(rdd -> {
if (!rdd.isEmpty()) {
receivedMessage.add(rdd.first());
}
});
jssc.start();
this.amqpTestUtils.sendBinaryMessage(address, sendMessage.getBytes());
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
e.printStackTrace();
}
assert(receivedMessage.get(0).equals(sendMessage));
jssc.stop();
}
}
|
Incremented test duration for all Java tests
|
src/test/java/org/apache/spark/streaming/amqp/JavaAMQPBrokerStreamSuite.java
|
Incremented test duration for all Java tests
|
|
Java
|
apache-2.0
|
f5551e4e0cd6476ec0e5b00f9053b923f473dbfb
| 0
|
seven332/EhViewer,seven332/EhViewer
|
/*
* Copyright (C) 2014-2015 Hippo Seven
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hippo.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.GradientDrawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import com.hippo.ehviewer.R;
import com.hippo.util.UiUtils;
import com.hippo.util.ViewUtils;
public class FloatingActionButton extends View {
private static final int DRAWABLE_WIDTH = UiUtils.dp2pix(24);
private GasketDrawer mGasketDrawer;
private Drawable mDrawable;
public FloatingActionButton(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs, 0, 0);
}
public FloatingActionButton(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context, attrs, defStyleAttr, 0);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public FloatingActionButton(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context, attrs, defStyleAttr, defStyleRes);
}
private void init(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
TypedArray a;
a = context.obtainStyledAttributes(attrs, R.styleable.FloatingActionButton, defStyleAttr, defStyleRes);
int bgColor = a.getColor(R.styleable.FloatingActionButton_fabColor, Color.BLACK);
mDrawable = a.getDrawable(R.styleable.FloatingActionButton_fabDrawable);
a.recycle();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mGasketDrawer = new GasketDrawerLollipop(this, bgColor);
} else {
mGasketDrawer = new GasketDrawerOld(this, bgColor);
}
}
@Override
protected void drawableStateChanged() {
super.drawableStateChanged();
int[] stateSet = getDrawableState();
mGasketDrawer.onStateChange(stateSet);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
setMeasuredDimension(ViewUtils.getSuitableSize(getSuggestedMinimumWidth(), widthMeasureSpec),
ViewUtils.getSuitableSize(getSuggestedMinimumHeight(), heightMeasureSpec));
}
protected void onDraw(Canvas canvas) {
mGasketDrawer.draw(canvas);
if (mDrawable != null) {
int left;
int top;
int right;
int bottom;
int width = getWidth();
int height = getHeight();
int halfDrawableWidth = DRAWABLE_WIDTH / 2;
left = width / 2 - halfDrawableWidth;
top = height / 2 - halfDrawableWidth;
right = left + DRAWABLE_WIDTH;
bottom = top + DRAWABLE_WIDTH;
mDrawable.setBounds(left, top, right, bottom);
mDrawable.draw(canvas);
}
}
public void setColor(int color) {
mGasketDrawer.setColor(color);
}
public void setDrawable(Drawable drawable) {
mDrawable = drawable;
invalidate();
}
interface GasketDrawer {
void onStateChange(int[] stateSet);
void draw(Canvas canvas);
void setColor(int color);
}
static class GasketDrawerOld implements GasketDrawer {
private static final int SHADOW_RADIOUS = UiUtils.dp2pix(3);
private static final int SHADOW_OFFSET_Y = UiUtils.dp2pix(1);
private static final int GASKET_PADDING = SHADOW_RADIOUS + SHADOW_OFFSET_Y; // 4dp
private static final int SHADOW_COLOR = 0x43000000;
private View mView;
private int mColor;
private int mDarkerColor;
private Paint mPaint;
private RectF mBounds;
private boolean mIsDark;
private GasketDrawerOld(View view, int color) {
mView = view;
mBounds = new RectF();
mColor = color;
mDarkerColor = UiUtils.getDarkerColor(color);
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG);
mPaint.setColor(mColor);
mPaint.setStyle(Paint.Style.FILL);
mPaint.setShadowLayer(SHADOW_RADIOUS, 0, SHADOW_OFFSET_Y, SHADOW_COLOR);
ViewUtils.removeHardwareAccelerationSupport(mView);
}
public void onStateChange(int[] stateSet) {
boolean enabled = false;
boolean pressed = false;
boolean focused = false;
for (int state : stateSet) {
if (state == android.R.attr.state_enabled) {
enabled = true;
}
if (state == android.R.attr.state_focused) {
focused = true;
}
if (state == android.R.attr.state_pressed) {
pressed = true;
}
}
setDark(!enabled || pressed || focused);
}
private void setDark(boolean isDark) {
if (mIsDark != isDark) {
mIsDark = isDark;
updateColor();
}
}
private void updateColor() {
mPaint.setColor(mIsDark ? mDarkerColor : mColor);
invalidateSelf();
}
private void invalidateSelf() {
mView.invalidate();
}
public void draw(Canvas canvas) {
mBounds.left = GASKET_PADDING;
mBounds.top = GASKET_PADDING;
mBounds.right = mView.getWidth() - GASKET_PADDING;
mBounds.bottom = mView.getHeight() - GASKET_PADDING;
canvas.drawOval(mBounds, mPaint);
}
public void setColor(int color) {
mColor = color;
mDarkerColor = UiUtils.getDarkerColor(color);
updateColor();
}
}
static class GasketDrawerLollipop implements GasketDrawer {
private static final int ELEVATION = UiUtils.dp2pix(4);
private static final int[][] STATES = new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_pressed},
new int[]{android.R.attr.state_focused},
new int[]{}
};
private View mView;
private int mColor;
private GradientDrawable mGradientDrawable;
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public GasketDrawerLollipop(View view, int color) {
mView = view;
mColor = color;
int darkColor = UiUtils.getDarkerColor(color);
mGradientDrawable = new GradientDrawable();
mGradientDrawable.setShape(GradientDrawable.OVAL);
mGradientDrawable.setColor(new ColorStateList(STATES,
new int[] {darkColor, darkColor, darkColor, color}));
mView.setBackground(mGradientDrawable);
mView.setElevation(ELEVATION);
}
@Override
public void onStateChange(int[] stateSet) {
// Empty
}
@Override
public void draw(Canvas canvas) {
// Empty
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void setColor(int color) {
if (mColor != color) {
mColor = color;
int darkColor = UiUtils.getDarkerColor(color);
mGradientDrawable.setColor(new ColorStateList(STATES,
new int[] {darkColor, darkColor, darkColor, color}));
}
}
}
}
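The Lollipop drawer above leans on ColorStateList's first-match rule: the state specs in STATES are checked in order, the first spec that matches the view's current drawable state supplies the color, and the empty spec acts as the default. The following self-contained sketch shows that lookup rule in isolation; it is plain Java with illustrative names, not the Android ColorStateList class itself.

// Minimal re-implementation of the first-match state lookup: each spec lists
// states as ints, where a positive value means the state must be set and a
// negative value means it must be unset; an empty spec matches everything.
final class SimpleStateColors {
    private final int[][] stateSpecs;
    private final int[] colors;

    SimpleStateColors(int[][] stateSpecs, int[] colors) {
        this.stateSpecs = stateSpecs;
        this.colors = colors;
    }

    int colorFor(int[] currentStates, int defaultColor) {
        for (int i = 0; i < stateSpecs.length; i++) {
            if (matches(stateSpecs[i], currentStates)) {
                return colors[i]; // the first matching spec wins
            }
        }
        return defaultColor;
    }

    private static boolean matches(int[] spec, int[] currentStates) {
        for (int s : spec) {
            boolean mustBeSet = s > 0;
            boolean isSet = contains(currentStates, Math.abs(s));
            if (mustBeSet != isSet) {
                return false;
            }
        }
        return true; // an empty spec is the catch-all default entry
    }

    private static boolean contains(int[] states, int state) {
        for (int s : states) {
            if (s == state) {
                return true;
            }
        }
        return false;
    }
}

Because of this ordering, the disabled, pressed and focused entries above must come before the empty default entry, otherwise the default color would always win.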
|
app/src/main/java/com/hippo/widget/FloatingActionButton.java
|
/*
* Copyright (C) 2014-2015 Hippo Seven
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hippo.widget;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.GradientDrawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import com.hippo.ehviewer.R;
import com.hippo.util.UiUtils;
import com.hippo.util.ViewUtils;
public class FloatingActionButton extends View {
private static final int DRAWABLE_WIDTH = UiUtils.dp2pix(24);
private GasketDrawer mGasketDrawer;
private Drawable mDrawable;
public FloatingActionButton(Context context, AttributeSet attrs) {
super(context, attrs);
init(context, attrs, 0, 0);
}
public FloatingActionButton(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context, attrs, defStyleAttr, 0);
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public FloatingActionButton(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context, attrs, defStyleAttr, defStyleRes);
}
private void init(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
TypedArray a;
a = context.obtainStyledAttributes(attrs, R.styleable.FloatingActionButton, defStyleAttr, defStyleRes);
int bgColor = a.getColor(R.styleable.FloatingActionButton_fabColor, Color.BLACK);
mDrawable = a.getDrawable(R.styleable.FloatingActionButton_fabDrawable);
a.recycle();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
mGasketDrawer = new GasketDrawerLollipop(this, bgColor);
} else {
mGasketDrawer = new GasketDrawerOld(this, bgColor);
}
}
@Override
protected void drawableStateChanged() {
super.drawableStateChanged();
int[] stateSet = getDrawableState();
mGasketDrawer.onStateChange(stateSet);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
setMeasuredDimension(ViewUtils.getSuitableSize(getSuggestedMinimumWidth(), widthMeasureSpec),
ViewUtils.getSuitableSize(getSuggestedMinimumHeight(), heightMeasureSpec));
}
protected void onDraw(Canvas canvas) {
mGasketDrawer.draw(canvas);
if (mDrawable != null) {
int left;
int top;
int right;
int bottom;
int width = getWidth();
int height = getHeight();
int halfDrawableWidth = DRAWABLE_WIDTH / 2;
left = width / 2 - halfDrawableWidth;
top = height / 2 - halfDrawableWidth;
right = left + DRAWABLE_WIDTH;
bottom = top + DRAWABLE_WIDTH;
mDrawable.setBounds(left, top, right, bottom);
mDrawable.draw(canvas);
}
}
public void setColor(int color) {
mGasketDrawer.setColor(color);
}
public void setDrawable(Drawable drawable) {
mDrawable = drawable;
invalidate();
}
interface GasketDrawer {
void onStateChange(int[] stateSet);
void draw(Canvas canvas);
void setColor(int color);
}
static class GasketDrawerOld implements GasketDrawer {
private static final int SHADOW_RADIOUS = UiUtils.dp2pix(3);
private static final int SHADOW_OFFSET_Y = UiUtils.dp2pix(1);
private static final int GASKET_PADDING = SHADOW_RADIOUS + SHADOW_OFFSET_Y; // 4dp
private static final int SHADOW_COLOR = 0x43000000;
private View mView;
private int mColor;
private int mDarkerColor;
private Paint mPaint;
private RectF mBounds;
private boolean mIsDark;
private GasketDrawerOld(View view, int color) {
mView = view;
mBounds = new RectF();
mColor = color;
mDarkerColor = UiUtils.getDarkerColor(color);
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG | Paint.DITHER_FLAG);
mPaint.setColor(mColor);
mPaint.setStyle(Paint.Style.FILL);
mPaint.setShadowLayer(SHADOW_RADIOUS, 0, SHADOW_OFFSET_Y, SHADOW_COLOR);
ViewUtils.removeHardwareAccelerationSupport(mView);
}
public void onStateChange(int[] stateSet) {
boolean enabled = false;
boolean pressed = false;
boolean focused = false;
for (int state : stateSet) {
if (state == android.R.attr.state_enabled) {
enabled = true;
}
if (state == android.R.attr.state_focused) {
focused = true;
}
if (state == android.R.attr.state_pressed) {
pressed = true;
}
}
setDark(!enabled || pressed || focused);
}
private void setDark(boolean isDark) {
if (mIsDark != isDark) {
mIsDark = isDark;
updateColor();
}
}
private void updateColor() {
mPaint.setColor(mIsDark ? mDarkerColor : mColor);
invalidateSelf();
}
private void invalidateSelf() {
mView.invalidate();
}
public void draw(Canvas canvas) {
mBounds.left = GASKET_PADDING;
mBounds.top = GASKET_PADDING;
mBounds.right = mView.getWidth() - GASKET_PADDING;
mBounds.bottom = mView.getHeight() - GASKET_PADDING;
canvas.drawOval(mBounds, mPaint);
}
public void setColor(int color) {
mColor = color;
mDarkerColor = UiUtils.getDarkerColor(color);
updateColor();
}
}
static class GasketDrawerLollipop implements GasketDrawer {
private static final int ELEVATION = UiUtils.dp2pix(5);
private static final int[][] STATES = new int[][]{
new int[]{-android.R.attr.state_enabled},
new int[]{android.R.attr.state_pressed},
new int[]{android.R.attr.state_focused},
new int[]{}
};
private View mView;
private int mColor;
private GradientDrawable mGradientDrawable;
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public GasketDrawerLollipop(View view, int color) {
mView = view;
mColor = color;
int darkColor = UiUtils.getDarkerColor(color);
mGradientDrawable = new GradientDrawable();
mGradientDrawable.setShape(GradientDrawable.OVAL);
mGradientDrawable.setColor(new ColorStateList(STATES,
new int[] {darkColor, darkColor, darkColor, color}));
mView.setBackground(mGradientDrawable);
mView.setElevation(ELEVATION);
}
@Override
public void onStateChange(int[] stateSet) {
// Empty
}
@Override
public void draw(Canvas canvas) {
// Empty
}
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
@Override
public void setColor(int color) {
if (mColor != color) {
mColor = color;
int darkColor = UiUtils.getDarkerColor(color);
mGradientDrawable.setColor(new ColorStateList(STATES,
new int[] {darkColor, darkColor, darkColor, color}));
}
}
}
}
|
Adjust the shadow of FloatingActionButton
|
app/src/main/java/com/hippo/widget/FloatingActionButton.java
|
Adjust the shadow of FloatingActionButton
|
|
Java
|
apache-2.0
|
bcb2c24122dec2a03ce52cb32c3bb561b62e5da1
| 0
|
qinjin/mdtc-cassandra,mshuler/cassandra,swps/cassandra,iamaleksey/cassandra,nitsanw/cassandra,mshuler/cassandra,sriki77/cassandra,hengxin/cassandra,codefollower/Cassandra-Research,leomrocha/cassandra,macintoshio/cassandra,krummas/cassandra,fengshao0907/Cassandra-Research,fengshao0907/Cassandra-Research,whitepages/cassandra,jeromatron/cassandra,clohfink/cassandra,knifewine/cassandra,ptnapoleon/cassandra,pallavi510/cassandra,snazy/cassandra,cooldoger/cassandra,JeremiahDJordan/cassandra,matthewtt/cassandra_read,spodkowinski/cassandra,blambov/cassandra,ejankan/cassandra,bdeggleston/cassandra,yonglehou/cassandra,pthomaid/cassandra,nitsanw/cassandra,tommystendahl/cassandra,pcn/cassandra-1,adelapena/cassandra,thelastpickle/cassandra,HidemotoNakada/cassandra-udf,sbtourist/cassandra,pauloricardomg/cassandra,mambocab/cassandra,pkdevbox/cassandra,Instagram/cassandra,rogerchina/cassandra,ollie314/cassandra,fengshao0907/cassandra-1,juiceblender/cassandra,iburmistrov/Cassandra,spodkowinski/cassandra,Jaumo/cassandra,hengxin/cassandra,driftx/cassandra,bdeggleston/cassandra,aarushi12002/cassandra,RyanMagnusson/cassandra,belliottsmith/cassandra,strapdata/cassandra,exoscale/cassandra,sharvanath/cassandra,vaibhi9/cassandra,likaiwalkman/cassandra,ifesdjeen/cassandra,DikangGu/cassandra,shookari/cassandra,spodkowinski/cassandra,aureagle/cassandra,DavidHerzogTU-Berlin/cassandraToRun,pofallon/cassandra,sedulam/CASSANDRA-12201,chbatey/cassandra-1,pthomaid/cassandra,christian-esken/cassandra,christian-esken/cassandra,hhorii/cassandra,scaledata/cassandra,DavidHerzogTU-Berlin/cassandra,bmel/cassandra,chaordic/cassandra,bdeggleston/cassandra,mshuler/cassandra,josh-mckenzie/cassandra,apache/cassandra,vaibhi9/cassandra,gdusbabek/cassandra,mike-tr-adamson/cassandra,aboudreault/cassandra,qinjin/mdtc-cassandra,WorksApplications/cassandra,guanxi55nba/db-improvement,juiceblender/cassandra,juiceblender/cassandra,pcn/cassandra-1,asias/cassandra,mkjellman/cassandra,darach/cassandra,weipinghe/cassandra,mheffner/cassandra-1,chbatey/cassandra-1,bcoverston/apache-hosted-cassandra,wreda/cassandra,ifesdjeen/cassandra,nutbunnies/cassandra,a-buck/cassandra,blerer/cassandra,mheffner/cassandra-1,Imran-C/cassandra,regispl/cassandra,likaiwalkman/cassandra,adelapena/cassandra,jeromatron/cassandra,mike-tr-adamson/cassandra,thobbs/cassandra,Jaumo/cassandra,EnigmaCurry/cassandra,Bj0rnen/cassandra,fengshao0907/cassandra-1,jrwest/cassandra,christian-esken/cassandra,ibmsoe/cassandra,jasonwee/cassandra,shawnkumar/cstargraph,pcmanus/cassandra,pbailis/cassandra-pbs,Stratio/cassandra,weideng1/cassandra,emolsson/cassandra,apache/cassandra,emolsson/cassandra,MasahikoSawada/cassandra,michaelsembwever/cassandra,codefollower/Cassandra-Research,ptuckey/cassandra,sivikt/cassandra,boneill42/cassandra,Stratio/cassandra,cooldoger/cassandra,yukim/cassandra,pkdevbox/cassandra,yukim/cassandra,jeffjirsa/cassandra,taigetco/cassandra_read,scylladb/scylla-tools-java,clohfink/cassandra,chaordic/cassandra,likaiwalkman/cassandra,miguel0afd/cassandra-cqlMod,blambov/cassandra,sharvanath/cassandra,boneill42/cassandra,guanxi55nba/key-value-store,jsanda/cassandra,Jaumo/cassandra,mt0803/cassandra,ifesdjeen/cassandra,pcn/cassandra-1,vaibhi9/cassandra,Stratio/stratio-cassandra,dongjiaqiang/cassandra,asias/cassandra,caidongyun/cassandra,pauloricardomg/cassandra,joesiewert/cassandra,tongjixianing/projects,bcoverston/cassandra,emolsson/cassandra,xiongzheng/Cassandra-Research,kgreav/cassandra,szhou1234/cassandra,vramaswamy456/cassandra,bcoverston/cassandra,exoscale/cassandra,beobal/c
assandra,yanbit/cassandra,blerer/cassandra,Bj0rnen/cassandra,rmarchei/cassandra,LatencyUtils/cassandra-stress2,vramaswamy456/cassandra,thobbs/cassandra,caidongyun/cassandra,AtwooTM/cassandra,rdio/cassandra,macintoshio/cassandra,yhnishi/cassandra,yangzhe1991/cassandra,asias/cassandra,shookari/cassandra,nakomis/cassandra,newrelic-forks/cassandra,adejanovski/cassandra,pallavi510/cassandra,segfault/apache_cassandra,lalithsuresh/cassandra-c3,sayanh/ViewMaintenanceCassandra,michaelmior/cassandra,mklew/mmp,rogerchina/cassandra,RyanMagnusson/cassandra,codefollower/Cassandra-Research,pthomaid/cassandra,kgreav/cassandra,rogerchina/cassandra,hhorii/cassandra,blambov/cassandra,nlalevee/cassandra,nakomis/cassandra,joesiewert/cassandra,ifesdjeen/cassandra,dkua/cassandra,weideng1/cassandra,JeremiahDJordan/cassandra,szhou1234/cassandra,whitepages/cassandra,rmarchei/cassandra,pauloricardomg/cassandra,gdusbabek/cassandra,pofallon/cassandra,sivikt/cassandra,tommystendahl/cassandra,taigetco/cassandra_read,jasonstack/cassandra,nvoron23/cassandra,bmel/cassandra,LatencyUtils/cassandra-stress2,nvoron23/cassandra,pcmanus/cassandra,jbellis/cassandra,tjake/cassandra,ptuckey/cassandra,josh-mckenzie/cassandra,knifewine/cassandra,chaordic/cassandra,thelastpickle/cassandra,heiko-braun/cassandra,iamaleksey/cassandra,sedulam/CASSANDRA-12201,jeffjirsa/cassandra,tongjixianing/projects,sluk3r/cassandra,adelapena/cassandra,segfault/apache_cassandra,rdio/cassandra,snazy/cassandra,matthewtt/cassandra_read,carlyeks/cassandra,mkjellman/cassandra,rackerlabs/cloudmetrics-cassandra,blerer/cassandra,mshuler/cassandra,driftx/cassandra,iamaleksey/cassandra,scylladb/scylla-tools-java,tjake/cassandra,sluk3r/cassandra,blambov/cassandra,sayanh/ViewMaintenanceCassandra,mgmuscari/cassandra-cdh4,miguel0afd/cassandra-cqlMod,nutbunnies/cassandra,tjake/cassandra,bcoverston/apache-hosted-cassandra,krummas/cassandra,AtwooTM/cassandra,aweisberg/cassandra,sriki77/cassandra,mklew/mmp,scylladb/scylla-tools-java,Stratio/stratio-cassandra,a-buck/cassandra,gdusbabek/cassandra,mashuai/Cassandra-Research,Instagram/cassandra,jrwest/cassandra,MasahikoSawada/cassandra,leomrocha/cassandra,rmarchei/cassandra,sharvanath/cassandra,bpupadhyaya/cassandra,instaclustr/cassandra,aureagle/cassandra,aweisberg/cassandra,DavidHerzogTU-Berlin/cassandraToRun,stef1927/cassandra,Instagram/cassandra,clohfink/cassandra,yonglehou/cassandra,dongjiaqiang/cassandra,jbellis/cassandra,WorksApplications/cassandra,yangzhe1991/cassandra,dprguiuc/Cassandra-Wasef,wreda/cassandra,belliottsmith/cassandra,szhou1234/cassandra,phact/cassandra,LatencyUtils/cassandra-stress2,Jollyplum/cassandra,pofallon/cassandra,darach/cassandra,yanbit/cassandra,nlalevee/cassandra,pkdevbox/cassandra,ejankan/cassandra,pcmanus/cassandra,DavidHerzogTU-Berlin/cassandra,taigetco/cassandra_read,mt0803/cassandra,knifewine/cassandra,iamaleksey/cassandra,lalithsuresh/cassandra-c3,apache/cassandra,guard163/cassandra,mike-tr-adamson/cassandra,weipinghe/cassandra,jeromatron/cassandra,kgreav/cassandra,xiongzheng/Cassandra-Research,scaledata/cassandra,guanxi55nba/db-improvement,blerer/cassandra,DICL/cassandra,josh-mckenzie/cassandra,macintoshio/cassandra,Imran-C/cassandra,joesiewert/cassandra,sayanh/ViewMaintenanceCassandra,adelapena/cassandra,qinjin/mdtc-cassandra,carlyeks/cassandra,aboudreault/cassandra,sluk3r/cassandra,hengxin/cassandra,strapdata/cassandra,iburmistrov/Cassandra,Stratio/stratio-cassandra,mkjellman/cassandra,regispl/cassandra,sbtourist/cassandra,jsanda/cassandra,sayanh/ViewMaintenanceSupport,instaclustr/cass
andra,szhou1234/cassandra,sayanh/ViewMaintenanceSupport,exoscale/cassandra,jkni/cassandra,phact/cassandra,jasonwee/cassandra,ollie314/cassandra,tongjixianing/projects,ptuckey/cassandra,ben-manes/cassandra,aweisberg/cassandra,DavidHerzogTU-Berlin/cassandra,scaledata/cassandra,jasonstack/cassandra,yonglehou/cassandra,aboudreault/cassandra,driftx/cassandra,spodkowinski/cassandra,kangkot/stratio-cassandra,yukim/cassandra,guard163/cassandra,yhnishi/cassandra,dkua/cassandra,GabrielNicolasAvellaneda/cassandra,instaclustr/cassandra,shookari/cassandra,tommystendahl/cassandra,WorksApplications/cassandra,chbatey/cassandra-1,swps/cassandra,instaclustr/cassandra,bdeggleston/cassandra,newrelic-forks/cassandra,a-buck/cassandra,pauloricardomg/cassandra,nakomis/cassandra,ptnapoleon/cassandra,rackerlabs/cloudmetrics-cassandra,strapdata/cassandra,stef1927/cassandra,yangzhe1991/cassandra,helena/cassandra,jeffjirsa/cassandra,guanxi55nba/key-value-store,aureagle/cassandra,ben-manes/cassandra,regispl/cassandra,michaelsembwever/cassandra,jsanda/cassandra,hhorii/cassandra,michaelsembwever/cassandra,darach/cassandra,mashuai/Cassandra-Research,fengshao0907/cassandra-1,bcoverston/apache-hosted-cassandra,mambocab/cassandra,weipinghe/cassandra,guanxi55nba/key-value-store,xiongzheng/Cassandra-Research,weideng1/cassandra,wreda/cassandra,aarushi12002/cassandra,Jollyplum/cassandra,jasobrown/cassandra,mike-tr-adamson/cassandra,shawnkumar/cstargraph,michaelmior/cassandra,sbtourist/cassandra,newrelic-forks/cassandra,Stratio/stratio-cassandra,guard163/cassandra,josh-mckenzie/cassandra,nlalevee/cassandra,scylladb/scylla-tools-java,swps/cassandra,MasahikoSawada/cassandra,stef1927/cassandra,clohfink/cassandra,project-zerus/cassandra,michaelsembwever/cassandra,thelastpickle/cassandra,carlyeks/cassandra,kangkot/stratio-cassandra,Jollyplum/cassandra,krummas/cassandra,aarushi12002/cassandra,nvoron23/cassandra,beobal/cassandra,adejanovski/cassandra,dprguiuc/Cassandra-Wasef,segfault/apache_cassandra,caidongyun/cassandra,jasobrown/cassandra,fengshao0907/Cassandra-Research,JeremiahDJordan/cassandra,dkua/cassandra,HidemotoNakada/cassandra-udf,mkjellman/cassandra,tjake/cassandra,mambocab/cassandra,snazy/cassandra,jasonstack/cassandra,ejankan/cassandra,phact/cassandra,miguel0afd/cassandra-cqlMod,Stratio/stratio-cassandra,jbellis/cassandra,matthewtt/cassandra_read,snazy/cassandra,mklew/mmp,guanxi55nba/db-improvement,mt0803/cassandra,shawnkumar/cstargraph,ollie314/cassandra,modempachev4/kassandra,yukim/cassandra,ibmsoe/cassandra,dprguiuc/Cassandra-Wasef,belliottsmith/cassandra,sedulam/CASSANDRA-12201,heiko-braun/cassandra,jkni/cassandra,aweisberg/cassandra,jrwest/cassandra,rdio/cassandra,bcoverston/cassandra,nutbunnies/cassandra,kangkot/stratio-cassandra,yhnishi/cassandra,sivikt/cassandra,belliottsmith/cassandra,RyanMagnusson/cassandra,ibmsoe/cassandra,kgreav/cassandra,project-zerus/cassandra,jrwest/cassandra,modempachev4/kassandra,rackerlabs/cloudmetrics-cassandra,Instagram/cassandra,cooldoger/cassandra,dongjiaqiang/cassandra,ben-manes/cassandra,mheffner/cassandra-1,mgmuscari/cassandra-cdh4,WorksApplications/cassandra,EnigmaCurry/cassandra,bpupadhyaya/cassandra,DICL/cassandra,adejanovski/cassandra,mgmuscari/cassandra-cdh4,driftx/cassandra,ptnapoleon/cassandra,stef1927/cassandra,GabrielNicolasAvellaneda/cassandra,DikangGu/cassandra,vramaswamy456/cassandra,bcoverston/cassandra,apache/cassandra,jkni/cassandra,kangkot/stratio-cassandra,pbailis/cassandra-pbs,michaelmior/cassandra,EnigmaCurry/cassandra,whitepages/cassandra,bmel/cassandra,strapdata/
cassandra,cooldoger/cassandra,yanbit/cassandra,DikangGu/cassandra,juiceblender/cassandra,jasobrown/cassandra,beobal/cassandra,GabrielNicolasAvellaneda/cassandra,jeffjirsa/cassandra,pallavi510/cassandra,AtwooTM/cassandra,bpupadhyaya/cassandra,nitsanw/cassandra,sriki77/cassandra,helena/cassandra,Imran-C/cassandra,jasobrown/cassandra,iburmistrov/Cassandra,project-zerus/cassandra,mashuai/Cassandra-Research,beobal/cassandra,jasonwee/cassandra,DavidHerzogTU-Berlin/cassandraToRun,thobbs/cassandra,modempachev4/kassandra,thelastpickle/cassandra,krummas/cassandra,helena/cassandra,lalithsuresh/cassandra-c3,Stratio/cassandra,leomrocha/cassandra,heiko-braun/cassandra,pbailis/cassandra-pbs,Bj0rnen/cassandra,tommystendahl/cassandra,boneill42/cassandra,HidemotoNakada/cassandra-udf,DICL/cassandra,kangkot/stratio-cassandra
|
/**
*
*/
package org.apache.cassandra.service;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.net.InetAddress;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.collect.Multimap;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.Table;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.NetworkTopologyStrategy;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.UnavailableException;
import org.apache.cassandra.utils.FBUtilities;
/**
* This class blocks for a quorum of responses _in all datacenters_ (CL.EACH_QUORUM).
*/
public class DatacenterSyncWriteResponseHandler extends AbstractWriteResponseHandler
{
private static final IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
private static final String localdc;
static
{
localdc = snitch.getDatacenter(FBUtilities.getLocalAddress());
}
private final NetworkTopologyStrategy strategy;
private HashMap<String, AtomicInteger> responses = new HashMap<String, AtomicInteger>();
protected DatacenterSyncWriteResponseHandler(Collection<InetAddress> writeEndpoints, Multimap<InetAddress, InetAddress> hintedEndpoints, ConsistencyLevel consistencyLevel, String table)
{
// Responses are managed by the map, so make it 1 for the superclass.
super(writeEndpoints, hintedEndpoints, consistencyLevel);
assert consistencyLevel == ConsistencyLevel.EACH_QUORUM;
strategy = (NetworkTopologyStrategy) Table.open(table).getReplicationStrategy();
for (String dc : strategy.getDatacenters())
{
int rf = strategy.getReplicationFactor(dc);
responses.put(dc, new AtomicInteger((rf / 2) + 1));
}
}
public static IWriteResponseHandler create(Collection<InetAddress> writeEndpoints, Multimap<InetAddress, InetAddress> hintedEndpoints, ConsistencyLevel consistencyLevel, String table)
{
return new DatacenterSyncWriteResponseHandler(writeEndpoints, hintedEndpoints, consistencyLevel, table);
}
public void response(Message message)
{
String dataCenter = message == null
? localdc
: snitch.getDatacenter(message.getFrom());
responses.get(dataCenter).getAndDecrement();
for (AtomicInteger i : responses.values())
{
if (0 < i.get())
return;
}
// all the quorum conditions are met
condition.signal();
}
public void assureSufficientLiveNodes() throws UnavailableException
{
Map<String, AtomicInteger> dcEndpoints = new HashMap<String, AtomicInteger>();
for (String dc: strategy.getDatacenters())
dcEndpoints.put(dc, new AtomicInteger());
for (InetAddress destination : hintedEndpoints.keySet())
{
assert writeEndpoints.contains(destination);
// figure out the destination dc
String destinationDC = snitch.getDatacenter(destination);
dcEndpoints.get(destinationDC).incrementAndGet();
}
// Throw an exception if any of the DCs doesn't have enough live nodes to accept the write.
for (String dc: strategy.getDatacenters())
{
if (dcEndpoints.get(dc).get() != responses.get(dc).get())
throw new UnavailableException();
}
}
public boolean isLatencyForSnitch()
{
return false;
}
}
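The handler above keeps one counter per datacenter, decrements the counter for whichever datacenter responded, and only signals once every counter has reached zero. A stripped-down sketch of that bookkeeping outside Cassandra might look like the following; the class and method names are illustrative, and a CountDownLatch stands in for the handler's condition object.

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;

// One countdown per datacenter; the latch opens only after every datacenter
// has received a local quorum of acknowledgements.
class PerDatacenterQuorum {
    private final Map<String, AtomicInteger> pending = new HashMap<>();
    private final CountDownLatch done = new CountDownLatch(1);

    PerDatacenterQuorum(Map<String, Integer> replicationFactorByDc) {
        for (Map.Entry<String, Integer> e : replicationFactorByDc.entrySet()) {
            pending.put(e.getKey(), new AtomicInteger(e.getValue() / 2 + 1)); // local quorum
        }
    }

    void ack(String datacenter) {
        pending.get(datacenter).decrementAndGet();
        for (AtomicInteger remaining : pending.values()) {
            if (remaining.get() > 0) {
                return; // at least one DC still needs responses
            }
        }
        done.countDown(); // every DC has reached its quorum
    }

    void await() throws InterruptedException {
        done.await();
    }
}

As in the handler, the per-datacenter quorum is rf / 2 + 1, and an acknowledgement for one datacenter never unblocks the waiter while another datacenter is still short of its quorum.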
|
src/java/org/apache/cassandra/service/DatacenterSyncWriteResponseHandler.java
|
/**
*
*/
package org.apache.cassandra.service;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.net.InetAddress;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.collect.Multimap;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.Table;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.NetworkTopologyStrategy;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.apache.cassandra.thrift.UnavailableException;
import org.apache.cassandra.utils.FBUtilities;
/**
* This class blocks for a quorum of responses _in all datacenters_ (CL.EACH_QUORUM).
*/
public class DatacenterSyncWriteResponseHandler extends AbstractWriteResponseHandler
{
private static final IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
private static final String localdc;
static
{
localdc = snitch.getDatacenter(FBUtilities.getLocalAddress());
}
private final NetworkTopologyStrategy strategy;
private HashMap<String, AtomicInteger> responses = new HashMap<String, AtomicInteger>();
protected DatacenterSyncWriteResponseHandler(Collection<InetAddress> writeEndpoints, Multimap<InetAddress, InetAddress> hintedEndpoints, ConsistencyLevel consistencyLevel, String table)
{
// Responses are managed by the map, so make it 1 for the superclass.
super(writeEndpoints, hintedEndpoints, consistencyLevel);
assert consistencyLevel == ConsistencyLevel.LOCAL_QUORUM;
strategy = (NetworkTopologyStrategy) Table.open(table).getReplicationStrategy();
for (String dc : strategy.getDatacenters())
{
int rf = strategy.getReplicationFactor(dc);
responses.put(dc, new AtomicInteger((rf / 2) + 1));
}
}
public static IWriteResponseHandler create(Collection<InetAddress> writeEndpoints, Multimap<InetAddress, InetAddress> hintedEndpoints, ConsistencyLevel consistencyLevel, String table)
{
return new DatacenterSyncWriteResponseHandler(writeEndpoints, hintedEndpoints, consistencyLevel, table);
}
public void response(Message message)
{
String dataCenter = message == null
? localdc
: snitch.getDatacenter(message.getFrom());
responses.get(dataCenter).getAndDecrement();
for (AtomicInteger i : responses.values())
{
if (0 < i.get())
return;
}
// all the quorum conditions are met
condition.signal();
}
public void assureSufficientLiveNodes() throws UnavailableException
{
Map<String, AtomicInteger> dcEndpoints = new HashMap<String, AtomicInteger>();
for (String dc: strategy.getDatacenters())
dcEndpoints.put(dc, new AtomicInteger());
for (InetAddress destination : hintedEndpoints.keySet())
{
assert writeEndpoints.contains(destination);
// figure out the destination dc
String destinationDC = snitch.getDatacenter(destination);
dcEndpoints.get(destinationDC).incrementAndGet();
}
// Throw an exception if any of the DCs doesn't have enough live nodes to accept the write.
for (String dc: strategy.getDatacenters())
{
if (dcEndpoints.get(dc).get() != responses.get(dc).get())
throw new UnavailableException();
}
}
public boolean isLatencyForSnitch()
{
return false;
}
}
|
fix assert in DatacenterSyncWriteResponseHandler
patch by Mark Guzman; reviewed by jbellis for CASSANDRA-2254
git-svn-id: 97f024e056bb8360c163ac9719c09bffda44c4d2@1075198 13f79535-47bb-0310-9956-ffa450edef68
|
src/java/org/apache/cassandra/service/DatacenterSyncWriteResponseHandler.java
|
fix assert in DatacenterSyncWriteResponseHandler patch by Mark Guzman; reviewed by jbellis for CASSANDRA-2254
|
|
Java
|
bsd-3-clause
|
3cd92bbee3c0c3e751d27590a921206aaedf3d9d
| 0
|
NCIP/c3pr,NCIP/c3pr,NCIP/c3pr
|
package edu.duke.cabig.c3pr.domain;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Transient;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.annotations.Where;
import org.springframework.context.MessageSource;
import org.springframework.context.support.ResourceBundleMessageSource;
import edu.duke.cabig.c3pr.constants.APIName;
import edu.duke.cabig.c3pr.constants.AmendmentType;
import edu.duke.cabig.c3pr.constants.CoordinatingCenterStudyStatus;
import edu.duke.cabig.c3pr.constants.NotificationEmailSubstitutionVariablesEnum;
import edu.duke.cabig.c3pr.constants.RegistrationWorkFlowStatus;
import edu.duke.cabig.c3pr.constants.SiteStudyStatus;
import edu.duke.cabig.c3pr.constants.StatusType;
import edu.duke.cabig.c3pr.domain.factory.ParameterizedBiDirectionalInstantiateFactory;
import edu.duke.cabig.c3pr.exception.C3PRCodedRuntimeException;
import edu.duke.cabig.c3pr.exception.C3PRExceptionHelper;
import edu.duke.cabig.c3pr.utils.CommonUtils;
import edu.duke.cabig.c3pr.utils.DateUtil;
import gov.nih.nci.cabig.ctms.collections.LazyListHelper;
/**
* The Class StudySite.
*
*
* @author Ram Chilukuri, Priyatam
* @author kherm, himanshu
*/
@Entity
@DiscriminatorValue(value = "SST")
public class StudySite extends StudyOrganization implements Comparable<StudySite> {
/** The companion study association. */
private CompanionStudyAssociation companionStudyAssociation ;
/** The target accrual number. */
private Integer targetAccrualNumber;
/** The c3pr exception helper. */
private C3PRExceptionHelper c3PRExceptionHelper;
/** The c3pr error messages. */
private MessageSource c3prErrorMessages;
/** The study site study version. */
private StudySiteStudyVersion studySiteStudyVersion;
/** The study site study versions. */
private List<StudySiteStudyVersion> studySiteStudyVersions;
    /** Helper that lazily backs the site status history list. */
private LazyListHelper lazyListHelper;
/**
* Instantiates a new study site.
*/
public StudySite() {
lazyListHelper = new LazyListHelper();
ResourceBundleMessageSource resourceBundleMessageSource = new ResourceBundleMessageSource();
resourceBundleMessageSource.setBasename("error_messages_multisite");
ResourceBundleMessageSource resourceBundleMessageSource1 = new ResourceBundleMessageSource();
resourceBundleMessageSource1.setBasename("error_messages_c3pr");
resourceBundleMessageSource1.setParentMessageSource(resourceBundleMessageSource);
this.c3prErrorMessages = resourceBundleMessageSource1;
this.c3PRExceptionHelper = new C3PRExceptionHelper(c3prErrorMessages);
studySiteStudyVersions= new ArrayList<StudySiteStudyVersion>();
lazyListHelper.add(SiteStatusHistory.class,new ParameterizedBiDirectionalInstantiateFactory<SiteStatusHistory>(SiteStatusHistory.class, this));
}
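    // Illustrative note (not part of the original source): the constructor above chains two
    // ResourceBundleMessageSource instances so that codes missing from error_messages_c3pr
    // fall back to error_messages_multisite. Resolution order, shown with a hypothetical code:
    //
    //   String text = c3prErrorMessages.getMessage(
    //           "C3PR.EXCEPTION.SOME.CODE",   // hypothetical message code
    //           null,                          // no message arguments
    //           null);                         // default locale, mirroring getCode() below
    //
    // Spring first checks the basename "error_messages_c3pr"; if the code is not found there,
    // it delegates to the parent message source with basename "error_messages_multisite".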
/**
 * Are there any assignments using this relationship?
 *
 * @return true if this study site has any study subjects
*/
@Transient
public boolean isUsed() {
return getStudySubjects().size() > 0;
}
/**
* Gets the study subjects.
*
* @return the study subjects
*/
@Transient
public List<StudySubject> getStudySubjects() {
Set<StudySubject> h = new HashSet<StudySubject>();
for (StudySiteStudyVersion studySiteStudyVersion: getStudySiteStudyVersions()){
for(StudySubjectStudyVersion studySubjectStudyVersion: studySiteStudyVersion.getStudySubjectStudyVersions()){
h.add(studySubjectStudyVersion.getStudySubject());
}
}
List<StudySubject> studySubjects= new ArrayList<StudySubject>();
studySubjects.addAll(h);
return studySubjects;
}
/**
* Gets the target accrual number.
*
* @return the target accrual number
*/
public Integer getTargetAccrualNumber() {
return targetAccrualNumber;
}
/**
* Sets the target accrual number.
*
* @param targetAccrualNumber the new target accrual number
*/
public void setTargetAccrualNumber(Integer targetAccrualNumber) {
this.targetAccrualNumber = targetAccrualNumber;
}
/**
* Gets the current accrual count.
*
* @return the current accrual count
*/
@Transient
public int getCurrentAccrualCount() {
int count = 0;
for (StudySubject s : this.getStudySubjects()) {
if (s.getRegWorkflowStatus() == RegistrationWorkFlowStatus.ENROLLED || s.getRegWorkflowStatus() == RegistrationWorkFlowStatus.REGISTERED_BUT_NOT_ENROLLED
|| s.getRegWorkflowStatus() == RegistrationWorkFlowStatus.RESERVED) count++;
}
return count;
}
/**
* Activate.
*/
public void activate(Date effectiveDate) {
if (!(this.getSiteStudyStatus() == SiteStudyStatus.PENDING || this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL || this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT)){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_CANNOT_SET_STATUS.CODE"),new String[] { this.getSiteStudyStatus().getDisplayName() });
}
if (this.getStudy().getCoordinatingCenterStudyStatus() == CoordinatingCenterStudyStatus.OPEN) {
if(this.getSiteStudyStatus(effectiveDate) == SiteStudyStatus.PENDING) {
StudySiteStudyVersion effectiveStudySiteStudyVersion = getStudySiteStudyVersion(effectiveDate);
if(effectiveStudySiteStudyVersion != null){
effectiveStudySiteStudyVersion.apply(effectiveDate);
}else{
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.EFFECTIVE_DATE_NO_STUDY_SITE_STUDY_VERSION_FOUND.CODE"),
new String[] {CommonUtils.getDateString(effectiveDate) });
}
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.ACTIVE);
// TODO companion study
// Study study = this.getStudy();
// if(!study.getCompanionIndicator()){
// for(CompanionStudyAssociation companionStudyAssociation : study.getStudyVersion().getCompanionStudyAssociations()){
// for(StudySite studySite : companionStudyAssociation.getStudySites()){
// if(studySite.getHealthcareSite().getPrimaryIdentifier() == this.getHealthcareSite().getPrimaryIdentifier()){
// if(studySite.getSiteStudyStatus() != SiteStudyStatus.ACTIVE){
// studySite.activate(effectiveDate);
// }
// }
// }
// }
// }
}
else {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_CANNOT_BE_SET_WITH_CURRENT_COORDINATING_CENTER_STATUS.CODE"),
new String[] {SiteStudyStatus.ACTIVE.getDisplayName(),this.getStudy().getCoordinatingCenterStudyStatus().getDisplayName() });
}
}
public void applyStudyAmendment(String versionName, Date irbApprovalDate) {
StudySiteStudyVersion previousStudySiteStudyVersion = getLatestStudySiteStudyVersion();
if(previousStudySiteStudyVersion == null){
throw new RuntimeException();
}
StudySiteStudyVersion newStudySiteStudyVersion = new StudySiteStudyVersion();
StudyVersion studyVersion = getStudy().getStudyVersion(versionName);
newStudySiteStudyVersion.setStudyVersion(studyVersion);
newStudySiteStudyVersion.setIrbApprovalDate(irbApprovalDate);
this.addStudySiteStudyVersion(newStudySiteStudyVersion);
newStudySiteStudyVersion.apply(irbApprovalDate);
if(previousStudySiteStudyVersion.getEndDate() == null || previousStudySiteStudyVersion.getEndDate().after(irbApprovalDate)){
GregorianCalendar cal = new GregorianCalendar();
cal.setTime(irbApprovalDate);
cal.add(Calendar.DATE, -1);
previousStudySiteStudyVersion.setEndDate(cal.getTime());
}
}
public void closeToAccrual(Date effectiveDate) throws C3PRCodedRuntimeException {
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_CLOSED_TO_ACCRUAL.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY_CLOSED.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.CLOSED_TO_ACCRUAL) ;
}
public void closeToAccrualAndTreatment(Date effectiveDate) {
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_CLOSED_TO_ACCRUAL_AND_TREATMENT.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING ){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT) ;
}
public void temporarilyCloseToAccrualAndTreatment(Date effectiveDate) {
if (this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_TEMPORARY_CLOSED_TO_ACCRUAL_AND_TREATMENT.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL || this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY_CLOSED.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT) ;
}
public void temporarilyCloseToAccrual(Date effectiveDate) throws C3PRCodedRuntimeException {
if (this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY.TEMPORARILY_CLOSED_TO_ACCRUAL.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_TEMPORARY_CLOSED_TO_ACCRUAL_AND_TREATMENT.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL || this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY_CLOSED.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL) ;
}
/**
* Gets the code.
*
* @param errortypeString the errortype string
*
* @return the code
*/
@Transient
public int getCode(String errortypeString) {
return Integer.parseInt(this.c3prErrorMessages.getMessage(errortypeString, null, null));
}
@Transient
public C3PRExceptionHelper getC3PRExceptionHelper() {
return c3PRExceptionHelper;
}
public void setExceptionHelper(C3PRExceptionHelper c3PRExceptionHelper) {
this.c3PRExceptionHelper = c3PRExceptionHelper;
}
@Transient
public MessageSource getC3prErrorMessages() {
return c3prErrorMessages;
}
public void setC3prErrorMessages(MessageSource errorMessages) {
c3prErrorMessages = errorMessages;
}
@Transient
/*
* Used by the notifications use case to compose the email message by replacing the sub vars.
*/
public Map<Object, Object> buildMapForNotification() {
Map<Object, Object> map = new HashMap<Object, Object>();
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_SITE_STATUS.toString(),getSiteStudyStatus().getDisplayName() == null ? "status": getSiteStudyStatus().getDisplayName());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_ID.toString(),getHealthcareSite().getName() == null ? "site name": getHealthcareSite().getName().toString());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_SHORT_TITLE.toString(), getStudy().getShortTitleText() == null ? "Short Title" : getStudy().getShortTitleText().toString());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_SITE_CURRENT_ACCRUAL.toString(),getStudy().getCurrentAccrualCount() == null ? "Study site current accrual" : getStudy().getCurrentAccrualCount().toString());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_ACCRUAL_THRESHOLD.toString(),getStudy().getTargetAccrualNumber() == null ? "Study site accrual threshold" : getStudy().getTargetAccrualNumber().toString());
return map;
}
@Transient
public List<APIName> getPossibleTransitions(){
List<APIName> possibleActions=new ArrayList<APIName>();
SiteStatusHistory siteHistory = getSiteStatusHistory(new Date());
if(siteHistory != null && siteHistory.getEndDate() != null){
return possibleActions;
}
if(this.getStudy().getCoordinatingCenterStudyStatus() != CoordinatingCenterStudyStatus.OPEN){
return possibleActions;
}
if(this.getSiteStudyStatus() == SiteStudyStatus.PENDING){
possibleActions.add(APIName.ACTIVATE_STUDY_SITE);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.ACTIVE){
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
possibleActions.add(APIName.TEMPORARILY_CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.TEMPORARILY_CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL){
possibleActions.add(APIName.ACTIVATE_STUDY_SITE);
possibleActions.add(APIName.TEMPORARILY_CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT){
possibleActions.add(APIName.ACTIVATE_STUDY_SITE);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.CLOSED_TO_ACCRUAL){
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}
return possibleActions;
}
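    // Illustrative sketch (not part of the original source): the branching in
    // getPossibleTransitions() is effectively a lookup table from the current
    // SiteStudyStatus to the APIs allowed in that state. Two of the rows, expressed as data:
    private static final Map<SiteStudyStatus, List<APIName>> TRANSITION_SKETCH =
            new HashMap<SiteStudyStatus, List<APIName>>();
    static {
        TRANSITION_SKETCH.put(SiteStudyStatus.PENDING,
                java.util.Arrays.asList(APIName.ACTIVATE_STUDY_SITE));
        TRANSITION_SKETCH.put(SiteStudyStatus.CLOSED_TO_ACCRUAL,
                java.util.Arrays.asList(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT));
    }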
@ManyToOne
@Cascade( { CascadeType.LOCK})
@JoinColumn(name = "comp_assoc_id" , insertable=false, updatable=false)
public CompanionStudyAssociation getCompanionStudyAssociation() {
return companionStudyAssociation;
}
public void setCompanionStudyAssociation(
CompanionStudyAssociation companionStudyAssociation) {
this.companionStudyAssociation = companionStudyAssociation;
}
@Transient
public StudySiteStudyVersion getStudySiteStudyVersion(){
if(studySiteStudyVersion == null){
int size = getStudySiteStudyVersions().size();
if(size == 0 ) {
throw getC3PRExceptionHelper().getRuntimeException(
getCode("C3PR.EXCEPTION.STUDYSITE.CORRUPT.STATE.CODE"), new String[] { this.getHealthcareSite().getName(), this.getStudy().getCoordinatingCenterStudyStatus().getDisplayName()});
}
studySiteStudyVersion= getStudySiteStudyVersion(new Date());
}
return studySiteStudyVersion;
}
public void setStudySiteStudyVersion(StudySiteStudyVersion studySiteStudyVersion) {
this.studySiteStudyVersion = studySiteStudyVersion;
}
@Transient
public StudySiteStudyVersion getLatestStudySiteStudyVersion(){
TreeSet<StudySiteStudyVersion> studySiteStudyVersionSet = new TreeSet<StudySiteStudyVersion>();
studySiteStudyVersionSet.addAll(getStudySiteStudyVersions());
return studySiteStudyVersionSet.last();
}
/**
* Gets the study site study version for a given date.
 * Due to amendments, participating sites can be on multiple versions;
 * however, at a given date the site should only be using a single study version.
* Use this method to fetch the version applicable on a date.
*
* @param date the date
*
* @return the study site study version, null if no study version was active on the given date
*/
@Transient
public StudySiteStudyVersion getStudySiteStudyVersion(Date date){
for(StudySiteStudyVersion studySiteStudyVersion : getStudySiteStudyVersions()){
if(studySiteStudyVersion.isValid(date)){
return studySiteStudyVersion;
}
}
return null;
}
public StudySiteStudyVersion getAccruingStudySiteStudyVersion(Date date){
SiteStudyStatus status = getSiteStudyStatus(date);
if(status == SiteStudyStatus.ACTIVE){
return getStudySiteStudyVersion(date);
}
return null;
}
@Transient
public StudySiteStudyVersion getLatestAccruingStudySiteStudyVersion(){
return getAccruingStudySiteStudyVersion(new Date());
}
/**
 * Gets the study version for a given date.
 * Due to amendments, participating sites can be on multiple versions;
 * however, at a given date the site should only be using a single study version.
* Use this method to fetch the version applicable on a date.
*
* @param date the date
*
* @return the study version, null if no study version was active on the given date
*/
@Transient
public StudyVersion getActiveStudyVersion(Date date){
StudySiteStudyVersion studySiteStudyVersion = getStudySiteStudyVersion(date);
if(studySiteStudyVersion != null){
SiteStudyStatus status = getSiteStudyStatus(date);
if(status == SiteStudyStatus.ACTIVE){
return studySiteStudyVersion.getStudyVersion();
}
}
return null;
}
/**
* Checks if study version setup is valid.
 * This method confirms that, on a given date, the study version that the site is using
 * is the same as the one the study deems latest.
* @param date the date
*
* throws RuntimeException:
* -if no study version is available for a given date
* -Code 347: if the study site does not have any study version while the study expects the
* site to get the IRB approval for a study version
* -Code 348: if the study site study version does not match the study version of the study on the
* given date. However the version does grant a grace period.
* -Code 349: if the study site study version does not match the study version of the study on the
* given date. However the version is an optional amendment.
*/
public void isStudyVersionSetupValid(Date date){
StudyVersion coCenterStudyVersion = getStudy().getStudyVersion(date);
StudySiteStudyVersion studySiteStudyVersion = getStudySiteStudyVersion(date);
if(coCenterStudyVersion == null){
throw getC3PRExceptionHelper().getRuntimeException( getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.NO_VERSION_FOUND.CODE"));
}
if(coCenterStudyVersion.getVersionStatus() == StatusType.IN){
throw getC3PRExceptionHelper().getRuntimeException( getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.PENDING.CODE"));
}
if(studySiteStudyVersion == null){
throw getC3PRExceptionHelper().getRuntimeException( getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.IMMEDIATE.CODE"));
}
if(coCenterStudyVersion == studySiteStudyVersion.getStudyVersion()){
return;
}
if(coCenterStudyVersion.getAmendmentType() == AmendmentType.IMMEDIATE_AFTER_GRACE_PERIOD){
long daysLeft = 0;
if(studySiteStudyVersion.getEndDate() != null){
daysLeft = (studySiteStudyVersion.getEndDate().getTime() - new Date().getTime()) / (1000*60*60*24);
}
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.GRACE.CODE"),new String[] {Long.toString(daysLeft)});
}
if(coCenterStudyVersion.getAmendmentType() == AmendmentType.OPTIONAL){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.OPTIONAL.CODE"));
}
}
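    // Illustrative helper (not part of the original source): the grace-period branch above
    // reports the days left via raw millisecond arithmetic, (end - now) / (1000*60*60*24).
    // An equivalent form using only JDK classes, kept here purely as a sketch:
    private static long daysLeftUntil(Date endDate) {
        long remainingMillis = endDate.getTime() - new Date().getTime();
        // truncates toward zero, matching the integer division used above
        return java.util.concurrent.TimeUnit.MILLISECONDS.toDays(remainingMillis);
    }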
/**
* Checks if the current study version setup is valid.
*/
public void isStudyVersionSetupValid(){
if(getSiteStudyStatus() != SiteStudyStatus.PENDING){
isStudyVersionSetupValid(new Date());
}
}
/**
* Checks if study site can accrue a subject
* on a study version on a given date.
*
* @param studyVersion the study version
* @param date the date
*
* @return true, if is registerable
*/
public boolean canEnroll(StudyVersion studyVersion , Date date){
StudySiteStudyVersion studySiteStudyVersion = getStudySiteStudyVersion(date);
return studySiteStudyVersion == null ? false : (studySiteStudyVersion.getStudyVersion() == studyVersion);
}
@OneToMany(mappedBy = "studySite")
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<StudySiteStudyVersion> getStudySiteStudyVersions() {
return studySiteStudyVersions;
}
@Transient
public List<StudySiteStudyVersion> getSortedStudySiteStudyVersions() {
List<StudySiteStudyVersion> sortedStudySiteStudyVersions = new ArrayList<StudySiteStudyVersion>();
sortedStudySiteStudyVersions.addAll(getStudySiteStudyVersions());
Collections.sort(getStudySiteStudyVersions());
return sortedStudySiteStudyVersions;
}
public void setStudySiteStudyVersions(List<StudySiteStudyVersion> studySiteStudyVersions) {
this.studySiteStudyVersions = studySiteStudyVersions;
}
public void addStudySiteStudyVersion(StudySiteStudyVersion studySiteStudyVersion) {
this.getStudySiteStudyVersions().add(studySiteStudyVersion);
studySiteStudyVersion.setStudySite(this);
}
public void setup(Study study) {
        // this is the method where we set up the study site for the first time.
super.setStudy(study);
// 1. initially there is no study site study version, so we are creating one and associating it to study site.
if(getStudySiteStudyVersions().size() == 0){
studySiteStudyVersion = new StudySiteStudyVersion();
            // 2. If an active study version is available, we associate it with the study site study version; otherwise we associate the latest available version.
StudyVersion studyVersion = study.getLatestActiveStudyVersion();
if(studyVersion != null){
studyVersion.addStudySiteStudyVersion(studySiteStudyVersion);
}else{
studyVersion = study.getStudyVersion();
studyVersion.addStudySiteStudyVersion(studySiteStudyVersion);
}
            //3. initialize the start date of the study site study version to the study version's version date so that it is valid from the start
studySiteStudyVersion.setStartDate(studyVersion.getVersionDate());
this.addStudySiteStudyVersion(studySiteStudyVersion);
}
if(getSiteStatusHistory().size() == 0){
// 4. add default pending status to the study site
createDefaultStudyStatusHistory();
}
}
public void setIrbApprovalDate(Date irbApprovalDate) {
getStudySiteStudyVersion().setIrbApprovalDate(irbApprovalDate);
}
@Transient
public Date getIrbApprovalDate() {
return getStudySiteStudyVersion().getIrbApprovalDate();
}
public int compareTo(StudySite o) {
if (this.equals(o)) return 0;
else return 1;
}
@Transient
public String getIrbApprovalDateStr() {
return CommonUtils.getDateString(getIrbApprovalDate());
}
public void handleStudySiteStatusChange(Date effectiveDate, SiteStudyStatus status){
SiteStatusHistory lastSiteStatusHistory = getLatestSiteStatusHistory();
if( lastSiteStatusHistory != null){
if(lastSiteStatusHistory.getStartDate() == null){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDY.STUDYSITE.STATUS_HISTORY.NO.START_DATE.CODE"),new String[] {lastSiteStatusHistory.getSiteStudyStatus().getDisplayName()});
}else if(lastSiteStatusHistory.getStartDate() != null && !lastSiteStatusHistory.getStartDate().before(effectiveDate)){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDY.STUDYSITE.STATUS_HISTORY.INVALID.EFFECTIVE_DATE.CODE"),new String[] {CommonUtils.getDateString(lastSiteStatusHistory.getStartDate())});
}
if(lastSiteStatusHistory.getEndDate() != null){
// last history object should not have end date
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDY.STUDYSITE.STATUS_HISTORY.END_DATE_PRESENT.CODE"),new String[] {lastSiteStatusHistory.getSiteStudyStatus().getDisplayName()});
}else{
Date suggestedEndDate = effectiveDate;
GregorianCalendar calendar = new GregorianCalendar();
calendar.setTime(suggestedEndDate);
                calendar.add(Calendar.DATE, -1);
lastSiteStatusHistory.setEndDate(calendar.getTime());
}
}
createSiteStatusHistory(effectiveDate, status);
}
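    // Illustrative helper (not part of the original source): both applyStudyAmendment and
    // handleStudySiteStatusChange close out the previous record by setting its end date to
    // the day before the new effective date. The calendar arithmetic, isolated as a sketch:
    private static Date dayBefore(Date date) {
        GregorianCalendar calendar = new GregorianCalendar();
        calendar.setTime(date);
        calendar.add(Calendar.DATE, -1); // subtract one calendar day
        return calendar.getTime();
    }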
@Transient
public SiteStudyStatus getSiteStudyStatus() {
return getSiteStudyStatus(new Date());
}
@Transient
public SiteStudyStatus getSiteStudyStatus(Date date) {
SiteStatusHistory siteStatusHistory = getSiteStatusHistory(date);
if(siteStatusHistory != null) {
return siteStatusHistory.getSiteStudyStatus();
}else {
createDefaultStudyStatusHistory();
return SiteStudyStatus.PENDING;
}
}
@Transient
public SiteStatusHistory getLatestSiteStatusHistory(){
TreeSet<SiteStatusHistory> siteStatusHistorySet = new TreeSet<SiteStatusHistory>();
siteStatusHistorySet.addAll(getSiteStatusHistory());
if(siteStatusHistorySet.size() > 0){
return siteStatusHistorySet.last();
}
return null;
}
@Transient
public SiteStatusHistory getSiteStatusHistory(Date date){
Date newDate = DateUtil.getUtilDateFromString(DateUtil.formatDate(date, "MM/dd/yyyy"), "MM/dd/yyyy");
List<SiteStatusHistory> siteStatusHistoryList = new ArrayList<SiteStatusHistory>();
siteStatusHistoryList.addAll(this.getSiteStatusHistory());
Collections.sort(siteStatusHistoryList);
for(SiteStatusHistory siteStatusHistory : siteStatusHistoryList){
Date startDate = siteStatusHistory.getStartDate();
Date endDate = siteStatusHistory.getEndDate();
if(!startDate.after(newDate) && (endDate == null ? true : !endDate.before(newDate))) {
return siteStatusHistory ;
}
}
return null ;
}
private void createDefaultStudyStatusHistory() {
Date currentDate = new Date();
GregorianCalendar calendar = new GregorianCalendar();
calendar.setTime(currentDate);
        calendar.add(Calendar.YEAR, -100);
SiteStatusHistory siteStatusHistory = new SiteStatusHistory();
siteStatusHistory.setStartDate(calendar.getTime());
siteStatusHistory.setSiteStudyStatus(SiteStudyStatus.PENDING);
this.addSiteStatusHistory(siteStatusHistory);
}
private void createSiteStatusHistory(Date startDate, SiteStudyStatus status) {
SiteStatusHistory siteStatusHistory = new SiteStatusHistory();
siteStatusHistory.setStartDate(startDate);
siteStatusHistory.setSiteStudyStatus(status);
this.addSiteStatusHistory(siteStatusHistory);
}
@OneToMany(mappedBy = "studySite", fetch = FetchType.LAZY)
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
@Where(clause = "retired_indicator = 'false'")
public List<SiteStatusHistory> getSiteStatusHistoryInternal() {
return lazyListHelper.getInternalList(SiteStatusHistory.class);
}
public void setSiteStatusHistoryInternal(final List<SiteStatusHistory> siteStatusHistory) {
lazyListHelper.setInternalList(SiteStatusHistory.class, siteStatusHistory);
}
@Transient
public List<SiteStatusHistory> getSiteStatusHistory() {
return lazyListHelper.getLazyList(SiteStatusHistory.class);
}
@Transient
public List<SiteStatusHistory> getSortedSiteStatusHistory() {
List<SiteStatusHistory> siteStatusHistoryList = new ArrayList<SiteStatusHistory>();
siteStatusHistoryList.addAll(getSiteStatusHistory());
Collections.sort(siteStatusHistoryList);
return siteStatusHistoryList;
}
public void setSiteStatusHistory(List<SiteStatusHistory> siteStatusHistory) {
setSiteStatusHistoryInternal(siteStatusHistory);
}
public void addSiteStatusHistory(SiteStatusHistory siteStatusHistory) {
siteStatusHistory.setStudySite(this);
getSiteStatusHistoryInternal().add(siteStatusHistory);
}
@Transient
public SiteStatusHistory getNextPossibleSiteStatusHistory(){
SiteStatusHistory siteHistory = getSiteStatusHistory(new Date());
if(siteHistory.getEndDate() != null){
List<SiteStatusHistory> listSiteStatusHistories = new ArrayList<SiteStatusHistory>();
listSiteStatusHistories.addAll(getSiteStatusHistory());
Collections.sort(listSiteStatusHistories);
int index = listSiteStatusHistories.indexOf(siteHistory);
if(listSiteStatusHistories.size() > index){
return listSiteStatusHistories.get(index + 1);
}
}
return null;
}
@Transient
public StudySiteStudyVersion getCurrentStudySiteStudyVersion(){
Date currentDate = new Date();
StudySiteStudyVersion currentSiteStudyVersion= getStudySiteStudyVersion(currentDate);
if(currentSiteStudyVersion == null){
List<StudySiteStudyVersion> listStudySiteStudyVersion = getSortedStudySiteStudyVersions();
return listStudySiteStudyVersion.get(listStudySiteStudyVersion.size() - 1);
}
return currentSiteStudyVersion;
}
}
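// Illustrative sketch (not part of the original source): getSiteStatusHistory(Date) and
// getStudySiteStudyVersion(Date) above both walk a date-ordered list and return the entry
// whose [startDate, endDate] window contains the requested date (a null endDate meaning
// "still open"). The Window type and the sample dates below are hypothetical.
class DateWindowLookupSketch {
    static class Window {
        final Date start;
        final Date end; // null means the window is still open
        Window(Date start, Date end) { this.start = start; this.end = end; }
        boolean contains(Date date) {
            return !start.after(date) && (end == null || !end.before(date));
        }
    }
    static Window find(List<Window> windows, Date date) {
        for (Window window : windows) {
            if (window.contains(date)) {
                return window;
            }
        }
        return null; // no window was active on the given date
    }
    public static void main(String[] args) {
        List<Window> windows = new ArrayList<Window>();
        Date now = new Date();
        windows.add(new Window(new Date(now.getTime() - 172800000L), new Date(now.getTime() - 86400000L))); // closed yesterday
        windows.add(new Window(now, null)); // open-ended current window
        System.out.println("found open window: " + (find(windows, now) == windows.get(1)));
    }
}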
|
codebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/StudySite.java
|
package edu.duke.cabig.c3pr.domain;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import javax.persistence.Column;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Transient;
import org.hibernate.annotations.Cascade;
import org.hibernate.annotations.CascadeType;
import org.hibernate.annotations.Where;
import org.springframework.context.MessageSource;
import org.springframework.context.support.ResourceBundleMessageSource;
import edu.duke.cabig.c3pr.constants.APIName;
import edu.duke.cabig.c3pr.constants.AmendmentType;
import edu.duke.cabig.c3pr.constants.CoordinatingCenterStudyStatus;
import edu.duke.cabig.c3pr.constants.NotificationEmailSubstitutionVariablesEnum;
import edu.duke.cabig.c3pr.constants.RegistrationWorkFlowStatus;
import edu.duke.cabig.c3pr.constants.SiteStudyStatus;
import edu.duke.cabig.c3pr.constants.StatusType;
import edu.duke.cabig.c3pr.domain.factory.ParameterizedBiDirectionalInstantiateFactory;
import edu.duke.cabig.c3pr.exception.C3PRCodedRuntimeException;
import edu.duke.cabig.c3pr.exception.C3PRExceptionHelper;
import edu.duke.cabig.c3pr.utils.CommonUtils;
import edu.duke.cabig.c3pr.utils.DateUtil;
import gov.nih.nci.cabig.ctms.collections.LazyListHelper;
/**
* The Class StudySite.
*
*
* @author Ram Chilukuri, Priyatam
* @author kherm, himanshu
*/
@Entity
@DiscriminatorValue(value = "SST")
public class StudySite extends StudyOrganization implements Comparable<StudySite> {
/** The companion study association. */
private CompanionStudyAssociation companionStudyAssociation ;
/** The target accrual number. */
private Integer targetAccrualNumber;
/** The c3pr exception helper. */
private C3PRExceptionHelper c3PRExceptionHelper;
/** The c3pr error messages. */
private MessageSource c3prErrorMessages;
/** The study site study version. */
private StudySiteStudyVersion studySiteStudyVersion;
/** The study site study versions. */
private List<StudySiteStudyVersion> studySiteStudyVersions;
    /** Helper that lazily backs the site status history list. */
private LazyListHelper lazyListHelper;
/**
* Instantiates a new study site.
*/
public StudySite() {
lazyListHelper = new LazyListHelper();
ResourceBundleMessageSource resourceBundleMessageSource = new ResourceBundleMessageSource();
resourceBundleMessageSource.setBasename("error_messages_multisite");
ResourceBundleMessageSource resourceBundleMessageSource1 = new ResourceBundleMessageSource();
resourceBundleMessageSource1.setBasename("error_messages_c3pr");
resourceBundleMessageSource1.setParentMessageSource(resourceBundleMessageSource);
this.c3prErrorMessages = resourceBundleMessageSource1;
this.c3PRExceptionHelper = new C3PRExceptionHelper(c3prErrorMessages);
studySiteStudyVersions= new ArrayList<StudySiteStudyVersion>();
lazyListHelper.add(SiteStatusHistory.class,new ParameterizedBiDirectionalInstantiateFactory<SiteStatusHistory>(SiteStatusHistory.class, this));
}
/**
 * Are there any assignments using this relationship?
 *
 * @return true if this study site has any study subjects
*/
@Transient
public boolean isUsed() {
return getStudySubjects().size() > 0;
}
/**
* Gets the study subjects.
*
* @return the study subjects
*/
@Transient
public List<StudySubject> getStudySubjects() {
Set<StudySubject> h = new HashSet<StudySubject>();
for (StudySiteStudyVersion studySiteStudyVersion: getStudySiteStudyVersions()){
for(StudySubjectStudyVersion studySubjectStudyVersion: studySiteStudyVersion.getStudySubjectStudyVersions()){
h.add(studySubjectStudyVersion.getStudySubject());
}
}
List<StudySubject> studySubjects= new ArrayList<StudySubject>();
studySubjects.addAll(h);
return studySubjects;
}
/**
* Gets the target accrual number.
*
* @return the target accrual number
*/
public Integer getTargetAccrualNumber() {
return targetAccrualNumber;
}
/**
* Sets the target accrual number.
*
* @param targetAccrualNumber the new target accrual number
*/
public void setTargetAccrualNumber(Integer targetAccrualNumber) {
this.targetAccrualNumber = targetAccrualNumber;
}
/**
* Gets the current accrual count.
*
* @return the current accrual count
*/
@Transient
public int getCurrentAccrualCount() {
int count = 0;
for (StudySubject s : this.getStudySubjects()) {
if (s.getRegWorkflowStatus() == RegistrationWorkFlowStatus.ENROLLED || s.getRegWorkflowStatus() == RegistrationWorkFlowStatus.REGISTERED_BUT_NOT_ENROLLED
|| s.getRegWorkflowStatus() == RegistrationWorkFlowStatus.RESERVED) count++;
}
return count;
}
/**
* Activate.
*/
public void activate(Date effectiveDate) {
if (!(this.getSiteStudyStatus() == SiteStudyStatus.PENDING || this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL || this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT)){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_CANNOT_SET_STATUS.CODE"),new String[] { this.getSiteStudyStatus().getDisplayName() });
}
if (this.getStudy().getCoordinatingCenterStudyStatus() == CoordinatingCenterStudyStatus.OPEN) {
if(this.getSiteStudyStatus(effectiveDate) == SiteStudyStatus.PENDING) {
StudySiteStudyVersion effectiveStudySiteStudyVersion = getStudySiteStudyVersion(effectiveDate);
if(effectiveStudySiteStudyVersion != null){
effectiveStudySiteStudyVersion.apply(effectiveDate);
}else{
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.EFFECTIVE_DATE_NO_STUDY_SITE_STUDY_VERSION_FOUND.CODE"),
new String[] {CommonUtils.getDateString(effectiveDate) });
}
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.ACTIVE);
// TODO companion study
// Study study = this.getStudy();
// if(!study.getCompanionIndicator()){
// for(CompanionStudyAssociation companionStudyAssociation : study.getStudyVersion().getCompanionStudyAssociations()){
// for(StudySite studySite : companionStudyAssociation.getStudySites()){
// if(studySite.getHealthcareSite().getPrimaryIdentifier() == this.getHealthcareSite().getPrimaryIdentifier()){
// if(studySite.getSiteStudyStatus() != SiteStudyStatus.ACTIVE){
// studySite.activate(effectiveDate);
// }
// }
// }
// }
// }
}
else {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_CANNOT_BE_SET_WITH_CURRENT_COORDINATING_CENTER_STATUS.CODE"),
new String[] {SiteStudyStatus.ACTIVE.getDisplayName(),this.getStudy().getCoordinatingCenterStudyStatus().getDisplayName() });
}
}
public void applyStudyAmendment(String versionName, Date irbApprovalDate) {
StudySiteStudyVersion previousStudySiteStudyVersion = getLatestStudySiteStudyVersion();
if(previousStudySiteStudyVersion == null){
throw new RuntimeException();
}
StudySiteStudyVersion newStudySiteStudyVersion = new StudySiteStudyVersion();
StudyVersion studyVersion = getStudy().getStudyVersion(versionName);
newStudySiteStudyVersion.setStudyVersion(studyVersion);
newStudySiteStudyVersion.setIrbApprovalDate(irbApprovalDate);
this.addStudySiteStudyVersion(newStudySiteStudyVersion);
newStudySiteStudyVersion.apply(irbApprovalDate);
if(previousStudySiteStudyVersion.getEndDate() == null || previousStudySiteStudyVersion.getEndDate().after(irbApprovalDate)){
GregorianCalendar cal = new GregorianCalendar();
cal.setTime(irbApprovalDate);
cal.add(Calendar.DATE, -1);
previousStudySiteStudyVersion.setEndDate(cal.getTime());
}
}
public void closeToAccrual(Date effectiveDate) throws C3PRCodedRuntimeException {
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_CLOSED_TO_ACCRUAL.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY_CLOSED.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.CLOSED_TO_ACCRUAL) ;
}
public void closeToAccrualAndTreatment(Date effectiveDate) {
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_CLOSED_TO_ACCRUAL_AND_TREATMENT.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING ){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT
.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT) ;
}
public void temporarilyCloseToAccrualAndTreatment(Date effectiveDate) {
if (this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_TEMPORARY_CLOSED_TO_ACCRUAL_AND_TREATMENT.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL || this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY_CLOSED.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT) ;
}
public void temporarilyCloseToAccrual(Date effectiveDate) throws C3PRCodedRuntimeException {
if (this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY.TEMPORARILY_CLOSED_TO_ACCRUAL.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STATUS_ALREADY_TEMPORARY_CLOSED_TO_ACCRUAL_AND_TREATMENT.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.PENDING) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.STATUS_NEEDS_TO_BE_ACTIVE_FIRST.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL.getDisplayName() });
}
if (this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL || this.getSiteStudyStatus() == SiteStudyStatus.CLOSED_TO_ACCRUAL_AND_TREATMENT) {
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.SITE.STUDY.ALREADY_CLOSED.CODE"),new String[] { SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT.getDisplayName() });
}
handleStudySiteStatusChange(effectiveDate, SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL) ;
}
/**
* Gets the code.
*
* @param errortypeString the errortype string
*
* @return the code
*/
@Transient
public int getCode(String errortypeString) {
return Integer.parseInt(this.c3prErrorMessages.getMessage(errortypeString, null, null));
}
@Transient
public C3PRExceptionHelper getC3PRExceptionHelper() {
return c3PRExceptionHelper;
}
public void setExceptionHelper(C3PRExceptionHelper c3PRExceptionHelper) {
this.c3PRExceptionHelper = c3PRExceptionHelper;
}
@Transient
public MessageSource getC3prErrorMessages() {
return c3prErrorMessages;
}
public void setC3prErrorMessages(MessageSource errorMessages) {
c3prErrorMessages = errorMessages;
}
@Transient
/*
* Used by the notifications use case to compose the email message by replacing the sub vars.
*/
public Map<Object, Object> buildMapForNotification() {
Map<Object, Object> map = new HashMap<Object, Object>();
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_SITE_STATUS.toString(),getSiteStudyStatus().getDisplayName() == null ? "status": getSiteStudyStatus().getDisplayName());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_ID.toString(),getHealthcareSite().getName() == null ? "site name": getHealthcareSite().getName().toString());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_SHORT_TITLE.toString(), getStudy().getShortTitleText() == null ? "Short Title" : getStudy().getShortTitleText().toString());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_SITE_CURRENT_ACCRUAL.toString(),getStudy().getCurrentAccrualCount() == null ? "Study site current accrual" : getStudy().getCurrentAccrualCount().toString());
map.put(NotificationEmailSubstitutionVariablesEnum.STUDY_ACCRUAL_THRESHOLD.toString(),getStudy().getTargetAccrualNumber() == null ? "Study site accrual threshold" : getStudy().getTargetAccrualNumber().toString());
return map;
}
@Transient
public List<APIName> getPossibleTransitions(){
List<APIName> possibleActions=new ArrayList<APIName>();
SiteStatusHistory siteHistory = getSiteStatusHistory(new Date());
if(siteHistory != null && siteHistory.getEndDate() != null){
return possibleActions;
}
if(this.getStudy().getCoordinatingCenterStudyStatus() != CoordinatingCenterStudyStatus.OPEN){
return possibleActions;
}
if(this.getSiteStudyStatus() == SiteStudyStatus.PENDING){
possibleActions.add(APIName.ACTIVATE_STUDY_SITE);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.ACTIVE){
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
possibleActions.add(APIName.TEMPORARILY_CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.TEMPORARILY_CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL){
possibleActions.add(APIName.ACTIVATE_STUDY_SITE);
possibleActions.add(APIName.TEMPORARILY_CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.TEMPORARILY_CLOSED_TO_ACCRUAL_AND_TREATMENT){
possibleActions.add(APIName.ACTIVATE_STUDY_SITE);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL);
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}else if(this.getSiteStudyStatus()==SiteStudyStatus.CLOSED_TO_ACCRUAL){
possibleActions.add(APIName.CLOSE_STUDY_SITE_TO_ACCRUAL_AND_TREATMENT);
return possibleActions;
}
return possibleActions;
}
@ManyToOne
@Cascade( { CascadeType.LOCK})
@JoinColumn(name = "comp_assoc_id" , insertable=false, updatable=false)
public CompanionStudyAssociation getCompanionStudyAssociation() {
return companionStudyAssociation;
}
public void setCompanionStudyAssociation(
CompanionStudyAssociation companionStudyAssociation) {
this.companionStudyAssociation = companionStudyAssociation;
}
@Transient
public StudySiteStudyVersion getStudySiteStudyVersion(){
if(studySiteStudyVersion == null){
int size = getStudySiteStudyVersions().size();
if(size == 0 ) {
throw getC3PRExceptionHelper().getRuntimeException(
getCode("C3PR.EXCEPTION.STUDYSITE.CORRUPT.STATE.CODE"), new String[] { this.getHealthcareSite().getName(), this.getStudy().getCoordinatingCenterStudyStatus().getDisplayName()});
}
studySiteStudyVersion= getStudySiteStudyVersion(new Date());
}
return studySiteStudyVersion;
}
public void setStudySiteStudyVersion(StudySiteStudyVersion studySiteStudyVersion) {
this.studySiteStudyVersion = studySiteStudyVersion;
}
@Transient
public StudySiteStudyVersion getLatestStudySiteStudyVersion(){
TreeSet<StudySiteStudyVersion> studySiteStudyVersionSet = new TreeSet<StudySiteStudyVersion>();
studySiteStudyVersionSet.addAll(getStudySiteStudyVersions());
return studySiteStudyVersionSet.last();
}
/**
* Gets the study site study version for a given date.
 * Due to amendments, participating sites can be on multiple versions;
 * however, at a given date the site should only be using a single study version.
* Use this method to fetch the version applicable on a date.
*
* @param date the date
*
* @return the study site study version, null if no study version was active on the given date
*/
@Transient
public StudySiteStudyVersion getStudySiteStudyVersion(Date date){
for(StudySiteStudyVersion studySiteStudyVersion : getStudySiteStudyVersions()){
if(studySiteStudyVersion.isValid(date)){
return studySiteStudyVersion;
}
}
return null;
}
public StudySiteStudyVersion getAccruingStudySiteStudyVersion(Date date){
SiteStudyStatus status = getSiteStudyStatus(date);
if(status == SiteStudyStatus.ACTIVE){
return getStudySiteStudyVersion(date);
}
return null;
}
@Transient
public StudySiteStudyVersion getLatestAccruingStudySiteStudyVersion(){
return getAccruingStudySiteStudyVersion(new Date());
}
/**
 * Gets the study version for a given date.
 * Due to amendments, participating sites can be on multiple versions;
 * however, at a given date the site should only be using a single study version.
* Use this method to fetch the version applicable on a date.
*
* @param date the date
*
* @return the study version, null if no study version was active on the given date
*/
@Transient
public StudyVersion getActiveStudyVersion(Date date){
StudySiteStudyVersion studySiteStudyVersion = getStudySiteStudyVersion(date);
if(studySiteStudyVersion != null){
SiteStudyStatus status = getSiteStudyStatus(date);
if(status == SiteStudyStatus.ACTIVE){
return studySiteStudyVersion.getStudyVersion();
}
}
return null;
}
/**
* Checks if study version setup is valid.
 * This method confirms that, on a given date, the study version that the site is using
 * is the same as the one the study deems latest.
* @param date the date
*
* throws RuntimeException:
* -if no study version is available for a given date
* -Code 347: if the study site does not have any study version while the study expects the
* site to get the IRB approval for a study version
* -Code 348: if the study site study version does not match the study version of the study on the
* given date. However the version does grant a grace period.
* -Code 349: if the study site study version does not match the study version of the study on the
* given date. However the version is an optional amendment.
*/
public void isStudyVersionSetupValid(Date date){
StudyVersion coCenterStudyVersion = getStudy().getStudyVersion(date);
StudySiteStudyVersion studySiteStudyVersion = getStudySiteStudyVersion(date);
if(coCenterStudyVersion == null){
throw getC3PRExceptionHelper().getRuntimeException( getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.NO_VERSION_FOUND.CODE"));
}
if(coCenterStudyVersion.getVersionStatus() == StatusType.IN){
throw getC3PRExceptionHelper().getRuntimeException( getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.PENDING.CODE"));
}
if(studySiteStudyVersion == null){
throw getC3PRExceptionHelper().getRuntimeException( getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.IMMEDIATE.CODE"));
}
if(coCenterStudyVersion == studySiteStudyVersion.getStudyVersion()){
return;
}
if(coCenterStudyVersion.getAmendmentType() == AmendmentType.IMMEDIATE_AFTER_GRACE_PERIOD){
long daysLeft = 0;
if(studySiteStudyVersion.getEndDate() != null){
daysLeft = (studySiteStudyVersion.getEndDate().getTime() - new Date().getTime()) / (1000*60*60*24);
}
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.GRACE.CODE"),new String[] {Long.toString(daysLeft)});
}
if(coCenterStudyVersion.getAmendmentType() == AmendmentType.OPTIONAL){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDYSITE.STUDYVERSION.OPTIONAL.CODE"));
}
}
/**
* Checks if the current study version setup is valid.
*/
public void isStudyVersionSetupValid(){
if(getSiteStudyStatus() != SiteStudyStatus.PENDING){
isStudyVersionSetupValid(new Date());
}
}
/**
* Checks if study site can accrue a subject
* on a study version on a given date.
*
* @param studyVersion the study version
* @param date the date
*
* @return true, if is registerable
*/
public boolean canEnroll(StudyVersion studyVersion , Date date){
StudySiteStudyVersion studySiteStudyVersion = getStudySiteStudyVersion(date);
return studySiteStudyVersion == null ? false : (studySiteStudyVersion.getStudyVersion() == studyVersion);
}
@OneToMany(mappedBy = "studySite")
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
public List<StudySiteStudyVersion> getStudySiteStudyVersions() {
return studySiteStudyVersions;
}
@Transient
public List<StudySiteStudyVersion> getSortedStudySiteStudyVersions() {
List<StudySiteStudyVersion> sortedStudySiteStudyVersions = new ArrayList<StudySiteStudyVersion>();
sortedStudySiteStudyVersions.addAll(getStudySiteStudyVersions());
Collections.sort(getStudySiteStudyVersions());
return sortedStudySiteStudyVersions;
}
public void setStudySiteStudyVersions(List<StudySiteStudyVersion> studySiteStudyVersions) {
this.studySiteStudyVersions = studySiteStudyVersions;
}
public void addStudySiteStudyVersion(StudySiteStudyVersion studySiteStudyVersion) {
this.getStudySiteStudyVersions().add(studySiteStudyVersion);
studySiteStudyVersion.setStudySite(this);
}
public void setup(Study study) {
        // this is the method where we set up the study site for the first time.
super.setStudy(study);
// 1. initially there is no study site study version, so we are creating one and associating it to study site.
if(getStudySiteStudyVersions().size() == 0){
studySiteStudyVersion = new StudySiteStudyVersion();
            // 2. If an active study version is available, we associate it with the study site study version; otherwise we associate the latest available version.
StudyVersion studyVersion = study.getLatestActiveStudyVersion();
if(studyVersion != null){
studyVersion.addStudySiteStudyVersion(studySiteStudyVersion);
}else{
studyVersion = study.getStudyVersion();
studyVersion.addStudySiteStudyVersion(studySiteStudyVersion);
}
            //3. initialize the start date of the study site study version to the study version's version date so that it is valid from the start
studySiteStudyVersion.setStartDate(studyVersion.getVersionDate());
this.addStudySiteStudyVersion(studySiteStudyVersion);
}
if(getSiteStatusHistory().size() == 0){
// 4. add default pending status to the study site
createDefaultStudyStatusHistory();
}
}
public void setIrbApprovalDate(Date irbApprovalDate) {
getStudySiteStudyVersion().setIrbApprovalDate(irbApprovalDate);
}
@Transient
public Date getIrbApprovalDate() {
return getStudySiteStudyVersion().getIrbApprovalDate();
}
public int compareTo(StudySite o) {
if (this.equals(o)) return 0;
else return 1;
}
@Transient
public String getIrbApprovalDateStr() {
return CommonUtils.getDateString(getIrbApprovalDate());
}
public void handleStudySiteStatusChange(Date effectiveDate, SiteStudyStatus status){
SiteStatusHistory lastSiteStatusHistory = getLatestSiteStatusHistory();
if( lastSiteStatusHistory != null){
if(lastSiteStatusHistory.getStartDate() == null){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDY.STUDYSITE.STATUS_HISTORY.NO.START_DATE.CODE"),new String[] {lastSiteStatusHistory.getSiteStudyStatus().getDisplayName()});
}else if(lastSiteStatusHistory.getStartDate() != null && !lastSiteStatusHistory.getStartDate().before(effectiveDate)){
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDY.STUDYSITE.STATUS_HISTORY.INVALID.EFFECTIVE_DATE.CODE"),new String[] {CommonUtils.getDateString(lastSiteStatusHistory.getStartDate())});
}
if(lastSiteStatusHistory.getEndDate() != null){
// last history object should not have end date
throw getC3PRExceptionHelper().getRuntimeException(getCode("C3PR.EXCEPTION.STUDY.STUDYSITE.STATUS_HISTORY.END_DATE_PRESENT.CODE"),new String[] {lastSiteStatusHistory.getSiteStudyStatus().getDisplayName()});
}else{
Date suggestedEndDate = effectiveDate;
GregorianCalendar calendar = new GregorianCalendar();
calendar.setTime(suggestedEndDate);
                calendar.add(Calendar.DATE, -1);
lastSiteStatusHistory.setEndDate(calendar.getTime());
}
}
createSiteStatusHistory(effectiveDate, status);
}
@Transient
public SiteStudyStatus getSiteStudyStatus() {
return getSiteStudyStatus(new Date());
}
@Transient
public SiteStudyStatus getSiteStudyStatus(Date date) {
SiteStatusHistory siteStatusHistory = getSiteStatusHistory(date);
if(siteStatusHistory != null) {
return siteStatusHistory.getSiteStudyStatus();
}else {
createDefaultStudyStatusHistory();
return SiteStudyStatus.PENDING;
}
}
@Transient
public SiteStatusHistory getLatestSiteStatusHistory(){
TreeSet<SiteStatusHistory> siteStatusHistorySet = new TreeSet<SiteStatusHistory>();
siteStatusHistorySet.addAll(getSiteStatusHistory());
if(siteStatusHistorySet.size() > 0){
return siteStatusHistorySet.last();
}
return null;
}
@Transient
public SiteStatusHistory getSiteStatusHistory(Date date){
Date newDate = DateUtil.getUtilDateFromString(DateUtil.formatDate(date, "MM/dd/yyyy"), "MM/dd/yyyy");
List<SiteStatusHistory> siteStatusHistoryList = new ArrayList<SiteStatusHistory>();
siteStatusHistoryList.addAll(this.getSiteStatusHistory());
Collections.sort(siteStatusHistoryList);
for(SiteStatusHistory siteStatusHistory : siteStatusHistoryList){
Date startDate = siteStatusHistory.getStartDate();
Date endDate = siteStatusHistory.getEndDate();
if(!startDate.after(newDate) && (endDate == null ? true : !endDate.before(newDate))) {
return siteStatusHistory ;
}
}
return null ;
}
private void createDefaultStudyStatusHistory() {
Date currentDate = new Date();
GregorianCalendar calendar = new GregorianCalendar();
calendar.setTime(currentDate);
        calendar.add(Calendar.YEAR, -100);
SiteStatusHistory siteStatusHistory = new SiteStatusHistory();
siteStatusHistory.setStartDate(calendar.getTime());
siteStatusHistory.setSiteStudyStatus(SiteStudyStatus.PENDING);
this.addSiteStatusHistory(siteStatusHistory);
}
private void createSiteStatusHistory(Date startDate, SiteStudyStatus status) {
SiteStatusHistory siteStatusHistory = new SiteStatusHistory();
siteStatusHistory.setStartDate(startDate);
siteStatusHistory.setSiteStudyStatus(status);
this.addSiteStatusHistory(siteStatusHistory);
}
@OneToMany(mappedBy = "studySite", fetch = FetchType.LAZY)
@Cascade(value = { CascadeType.ALL, CascadeType.DELETE_ORPHAN })
@Where(clause = "retired_indicator = 'false'")
public List<SiteStatusHistory> getSiteStatusHistoryInternal() {
return lazyListHelper.getInternalList(SiteStatusHistory.class);
}
public void setSiteStatusHistoryInternal(final List<SiteStatusHistory> siteStatusHistory) {
lazyListHelper.setInternalList(SiteStatusHistory.class, siteStatusHistory);
}
@Transient
public List<SiteStatusHistory> getSiteStatusHistory() {
return lazyListHelper.getLazyList(SiteStatusHistory.class);
}
@Transient
public List<SiteStatusHistory> getSortedSiteStatusHistory() {
List<SiteStatusHistory> siteStatusHistoryList = new ArrayList<SiteStatusHistory>();
siteStatusHistoryList.addAll(getSiteStatusHistory());
Collections.sort(siteStatusHistoryList);
return siteStatusHistoryList;
}
public void setSiteStatusHistory(List<SiteStatusHistory> siteStatusHistory) {
setSiteStatusHistoryInternal(siteStatusHistory);
}
public void addSiteStatusHistory(SiteStatusHistory siteStatusHistory) {
siteStatusHistory.setStudySite(this);
getSiteStatusHistoryInternal().add(siteStatusHistory);
}
@Transient
public SiteStatusHistory getNextPossibleSiteStatusHistory(){
SiteStatusHistory siteHistory = getSiteStatusHistory(new Date());
if(siteHistory.getEndDate() != null){
List<SiteStatusHistory> listSiteStatusHistories = new ArrayList<SiteStatusHistory>();
listSiteStatusHistories.addAll(getSiteStatusHistory());
Collections.sort(listSiteStatusHistories);
int index = listSiteStatusHistories.indexOf(siteHistory);
if(listSiteStatusHistories.size() > index){
return listSiteStatusHistories.get(index + 1);
}
}
return null;
}
@Transient
public StudySiteStudyVersion getCurrentStudySiteStudyVersion(){
Date currentDate = new Date();
StudySiteStudyVersion currentSiteStudyVersion= getStudySiteStudyVersion(currentDate);
if(currentSiteStudyVersion == null){
List<StudySiteStudyVersion> listStudySiteStudyVersion = getSortedStudySiteStudyVersions();
return listStudySiteStudyVersion.get(listStudySiteStudyVersion.size() - 1);
}
return currentSiteStudyVersion;
}
}
|
unused imports
|
codebase/projects/core/src/java/edu/duke/cabig/c3pr/domain/StudySite.java
|
unused imports
|
|
Java
|
bsd-3-clause
|
51b494881fe634a7a3511e812c81347146ce307c
| 0
|
sammymax/nullpomino,sammymax/nullpomino
|
/*
Copyright (c) 2010, NullNoname
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of NullNoname nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package org.game_host.hebo.nullpomino.game.subsystem.mode;
import java.util.Random;
import org.game_host.hebo.nullpomino.game.component.BGMStatus;
import org.game_host.hebo.nullpomino.game.component.Block;
import org.game_host.hebo.nullpomino.game.component.Controller;
import org.game_host.hebo.nullpomino.game.component.Field;
import org.game_host.hebo.nullpomino.game.component.Piece;
import org.game_host.hebo.nullpomino.game.event.EventReceiver;
import org.game_host.hebo.nullpomino.game.play.GameEngine;
import org.game_host.hebo.nullpomino.game.play.GameManager;
import org.game_host.hebo.nullpomino.util.CustomProperties;
import org.game_host.hebo.nullpomino.util.GeneralUtil;
/**
* AVALANCHE VS-BATTLE mode (beta)
*/
public class AvalancheVSMode extends DummyMode {
/** Current version */
private static final int CURRENT_VERSION = 0;
/** Enabled piece types */
private static final int[] PIECE_ENABLE = {0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0};
/** Block colors */
private static final int[] BLOCK_COLORS =
{
Block.BLOCK_COLOR_RED,
Block.BLOCK_COLOR_GREEN,
Block.BLOCK_COLOR_BLUE,
Block.BLOCK_COLOR_YELLOW,
Block.BLOCK_COLOR_PURPLE
};
/** Number of players */
private static final int MAX_PLAYERS = 2;
/** Names of ojama counter setting constants */
private final int OJAMA_COUNTER_OFF = 0, OJAMA_COUNTER_ON = 1, OJAMA_COUNTER_FEVER = 2;
/** Display names of the ojama block types */
//private final String[] OJAMA_TYPE_STRING = {"NORMAL", "ONE RISE", "1-ATTACK"};
/** Names of ojama counter settings */
private final String[] OJAMA_COUNTER_STRING = {"OFF", "ON", "FEVER"};
/** Frame color for each player */
private final int[] PLAYER_COLOR_FRAME = {GameEngine.FRAME_COLOR_RED, GameEngine.FRAME_COLOR_BLUE};
/** GameManager that owns this mode */
private GameManager owner;
/** EventReceiver that handles drawing and other events */
private EventReceiver receiver;
/** Rule settings for countering ojama not yet dropped */
private int[] ojamaCounterMode;
/** Number of pending ojama blocks */
private int[] ojama;
/** Number of ojama blocks sent */
private int[] ojamaSent;
/** Time elapsed since the last score gain */
private int[] scgettime;
/** BGM number to use */
private int bgmno;
/** Big mode */
private boolean[] big;
/** Sound effects ON/OFF */
private boolean[] enableSE;
/** Flag for using a map */
private boolean[] useMap;
/** Map set number to use */
private int[] mapSet;
/** Map number (-1 for random) */
private int[] mapNumber;
/** Last used preset number */
private int[] presetNumber;
/** Winner ID */
private int winnerID;
/** Property files for the map sets */
private CustomProperties[] propMap;
/** Maximum map number */
private int[] mapMaxNo;
/** Backup fields (used when saving the map to a replay) */
private Field[] fldBackup;
/** Random number generator for map selection */
private Random randMap;
/** Version */
private int version;
/** Flag for all clear */
private boolean[] zenKeshi;
/** Amount of points earned from most recent clear */
private int[] lastscore, lastmultiplier;
/** Amount of ojama added in current chain */
private int[] ojamaAdd;
/** Score */
private int[] score;
/** Max amount of ojama dropped at once */
private int[] maxAttack;
/** Number of colors to use */
private int[] numColors;
/** Minimum chain count needed to send ojama */
private int[] rensaShibari;
/** Denominator for score-to-ojama conversion */
private int[] ojamaRate;
/** Settings for hard ojama blocks */
private int[] ojamaHard;
/** Seconds until Hurryup starts (0 = no Hurryup) */
private int[] hurryupSeconds;
/** Fever points needed to enter Fever Mode */
private int[] feverThreshold;
/** Fever points */
private int[] feverPoints;
/** Fever time */
private int[] feverTime;
/** Minimum and maximum fever time */
private int[] feverTimeMin, feverTimeMax;
/** Flag set to true when player is in Fever Mode */
private boolean[] inFever;
/** Backup fields for Fever Mode */
private Field[] feverBackupField;
/** Second ojama counter for Fever Mode */
private int[] ojamaFever;
/** Set to true when opponent starts chain while in Fever Mode */
private boolean[] ojamaAddToFever;
/** Set to true when last drop resulted in a clear */
private boolean[] cleared;
/** Set to true when dropping ojama blocks */
private boolean[] ojamaDrop;
/*
* Mode name
*/
@Override
public String getName() {
return "AVALANCHE VS-BATTLE (BETA)";
}
/*
* Number of players
*/
@Override
public int getPlayers() {
return MAX_PLAYERS;
}
/*
* Mode initialization
*/
@Override
public void modeInit(GameManager manager) {
owner = manager;
receiver = owner.receiver;
ojamaCounterMode = new int[MAX_PLAYERS];
ojama = new int[MAX_PLAYERS];
ojamaSent = new int[MAX_PLAYERS];
scgettime = new int[MAX_PLAYERS];
bgmno = 0;
big = new boolean[MAX_PLAYERS];
enableSE = new boolean[MAX_PLAYERS];
hurryupSeconds = new int[MAX_PLAYERS];
useMap = new boolean[MAX_PLAYERS];
mapSet = new int[MAX_PLAYERS];
mapNumber = new int[MAX_PLAYERS];
presetNumber = new int[MAX_PLAYERS];
propMap = new CustomProperties[MAX_PLAYERS];
mapMaxNo = new int[MAX_PLAYERS];
fldBackup = new Field[MAX_PLAYERS];
randMap = new Random();
zenKeshi = new boolean[MAX_PLAYERS];
lastscore = new int[MAX_PLAYERS];
lastmultiplier = new int[MAX_PLAYERS];
ojamaAdd = new int[MAX_PLAYERS];
score = new int[MAX_PLAYERS];
numColors = new int[MAX_PLAYERS];
maxAttack = new int[MAX_PLAYERS];
rensaShibari = new int[MAX_PLAYERS];
ojamaRate = new int[MAX_PLAYERS];
ojamaHard = new int[MAX_PLAYERS];
feverThreshold = new int[MAX_PLAYERS];
feverPoints = new int[MAX_PLAYERS];
feverTime = new int[MAX_PLAYERS];
feverTimeMin = new int[MAX_PLAYERS];
feverTimeMax = new int[MAX_PLAYERS];
inFever = new boolean[MAX_PLAYERS];
feverBackupField = new Field[MAX_PLAYERS];
ojamaFever = new int[MAX_PLAYERS];
ojamaAddToFever = new boolean[MAX_PLAYERS];
cleared = new boolean[MAX_PLAYERS];
ojamaDrop = new boolean[MAX_PLAYERS];
winnerID = -1;
}
/**
* Load a speed preset
* @param engine GameEngine
* @param prop Property file to read from
* @param preset Preset number
*/
private void loadPreset(GameEngine engine, CustomProperties prop, int preset) {
engine.speed.gravity = prop.getProperty("avalanchevs.gravity." + preset, 4);
engine.speed.denominator = prop.getProperty("avalanchevs.denominator." + preset, 256);
engine.speed.are = prop.getProperty("avalanchevs.are." + preset, 24);
engine.speed.areLine = prop.getProperty("avalanchevs.areLine." + preset, 24);
engine.speed.lineDelay = prop.getProperty("avalanchevs.lineDelay." + preset, 10);
engine.speed.lockDelay = prop.getProperty("avalanchevs.lockDelay." + preset, 30);
engine.speed.das = prop.getProperty("avalanchevs.das." + preset, 14);
}
/**
* Save a speed preset
* @param engine GameEngine
* @param prop Property file to save to
* @param preset Preset number
*/
private void savePreset(GameEngine engine, CustomProperties prop, int preset) {
prop.setProperty("avalanchevs.gravity." + preset, engine.speed.gravity);
prop.setProperty("avalanchevs.denominator." + preset, engine.speed.denominator);
prop.setProperty("avalanchevs.are." + preset, engine.speed.are);
prop.setProperty("avalanchevs.areLine." + preset, engine.speed.areLine);
prop.setProperty("avalanchevs.lineDelay." + preset, engine.speed.lineDelay);
prop.setProperty("avalanchevs.lockDelay." + preset, engine.speed.lockDelay);
prop.setProperty("avalanchevs.das." + preset, engine.speed.das);
}
/**
* Load settings other than speed
* @param engine GameEngine
* @param prop Property file to read from
*/
private void loadOtherSetting(GameEngine engine, CustomProperties prop) {
int playerID = engine.playerID;
bgmno = prop.getProperty("avalanchevs.bgmno", 0);
ojamaCounterMode[playerID] = prop.getProperty("avalanchevs.ojamaCounterMode", OJAMA_COUNTER_ON);
big[playerID] = prop.getProperty("avalanchevs.big.p" + playerID, false);
enableSE[playerID] = prop.getProperty("avalanchevs.enableSE.p" + playerID, true);
hurryupSeconds[playerID] = prop.getProperty("vsbattle.hurryupSeconds.p" + playerID, 192);
useMap[playerID] = prop.getProperty("avalanchevs.useMap.p" + playerID, false);
mapSet[playerID] = prop.getProperty("avalanchevs.mapSet.p" + playerID, 0);
mapNumber[playerID] = prop.getProperty("avalanchevs.mapNumber.p" + playerID, -1);
presetNumber[playerID] = prop.getProperty("avalanchevs.presetNumber.p" + playerID, 0);
maxAttack[playerID] = prop.getProperty("avalanchevs.maxAttack.p" + playerID, 30);
numColors[playerID] = prop.getProperty("avalanchevs.numColors.p" + playerID, 5);
rensaShibari[playerID] = prop.getProperty("avalanchevs.rensaShibari.p" + playerID, 1);
ojamaRate[playerID] = prop.getProperty("avalanchevs.ojamaRate.p" + playerID, 120);
ojamaHard[playerID] = prop.getProperty("avalanchevs.ojamaHard.p" + playerID, 0);
feverThreshold[playerID] = prop.getProperty("avalanchevs.feverThreshold.p" + playerID, 0);
feverTimeMin[playerID] = prop.getProperty("avalanchevs.feverTimeMin.p" + playerID, 15);
feverTimeMax[playerID] = prop.getProperty("avalanchevs.feverTimeMax.p" + playerID, 30);
}
/**
* Save settings other than speed
* @param engine GameEngine
* @param prop Property file to save to
*/
private void saveOtherSetting(GameEngine engine, CustomProperties prop) {
int playerID = engine.playerID;
prop.setProperty("avalanchevs.bgmno", bgmno);
prop.setProperty("avalanchevs.ojamaCounterMode", ojamaCounterMode[playerID]);
prop.setProperty("avalanchevs.big.p" + playerID, big[playerID]);
prop.setProperty("avalanchevs.enableSE.p" + playerID, enableSE[playerID]);
prop.setProperty("vsbattle.hurryupSeconds.p" + playerID, hurryupSeconds[playerID]);
prop.setProperty("avalanchevs.useMap.p" + playerID, useMap[playerID]);
prop.setProperty("avalanchevs.mapSet.p" + playerID, mapSet[playerID]);
prop.setProperty("avalanchevs.mapNumber.p" + playerID, mapNumber[playerID]);
prop.setProperty("avalanchevs.presetNumber.p" + playerID, presetNumber[playerID]);
prop.setProperty("avalanchevs.maxAttack.p" + playerID, maxAttack[playerID]);
prop.setProperty("avalanchevs.numColors.p" + playerID, numColors[playerID]);
prop.setProperty("avalanchevs.rensaShibari.p" + playerID, rensaShibari[playerID]);
prop.setProperty("avalanchevs.ojamaRate.p" + playerID, ojamaRate[playerID]);
prop.setProperty("avalanchevs.ojamaHard.p" + playerID, ojamaHard[playerID]);
prop.setProperty("avalanchevs.feverThreshold.p" + playerID, feverThreshold[playerID]);
}
/**
* Load a map
* @param field Field
* @param prop Property file to read from
* @param id Arbitrary ID
*/
private void loadMap(Field field, CustomProperties prop, int id) {
field.reset();
//field.readProperty(prop, id);
field.stringToField(prop.getProperty("map." + id, ""));
field.setAllAttribute(Block.BLOCK_ATTRIBUTE_VISIBLE, true);
field.setAllAttribute(Block.BLOCK_ATTRIBUTE_OUTLINE, true);
field.setAllAttribute(Block.BLOCK_ATTRIBUTE_SELFPLACED, false);
}
/**
* Save a map
* @param field Field
* @param prop Property file to save to
* @param id Arbitrary ID
*/
private void saveMap(Field field, CustomProperties prop, int id) {
//field.writeProperty(prop, id);
prop.setProperty("map." + id, field.fieldToString());
}
/**
* Load a map for preview
* @param engine GameEngine
* @param playerID Player number
* @param id Map ID
* @param forceReload When true, forces the map file to be reloaded
*/
private void loadMapPreview(GameEngine engine, int playerID, int id, boolean forceReload) {
if((propMap[playerID] == null) || (forceReload)) {
mapMaxNo[playerID] = 0;
propMap[playerID] = receiver.loadProperties("config/map/vsbattle/" + mapSet[playerID] + ".map");
}
if((propMap[playerID] == null) && (engine.field != null)) {
engine.field.reset();
} else if(propMap[playerID] != null) {
mapMaxNo[playerID] = propMap[playerID].getProperty("map.maxMapNumber", 0);
engine.createFieldIfNeeded();
loadMap(engine.field, propMap[playerID], id);
engine.field.setAllSkin(engine.getSkin());
}
}
/*
* Initialization for each player
*/
@Override
public void playerInit(GameEngine engine, int playerID) {
if(playerID == 1) {
engine.randSeed = owner.engine[0].randSeed;
engine.random = new Random(owner.engine[0].randSeed);
}
engine.framecolor = PLAYER_COLOR_FRAME[playerID];
engine.clearMode = GameEngine.CLEAR_COLOR;
engine.garbageColorClear = true;
engine.lineGravityType = GameEngine.LINE_GRAVITY_CASCADE;
for(int i = 0; i < Piece.PIECE_COUNT; i++)
engine.nextPieceEnable[i] = (PIECE_ENABLE[i] == 1);
engine.blockColors = BLOCK_COLORS;
engine.randomBlockColor = true;
engine.connectBlocks = false;
ojama[playerID] = 0;
ojamaSent[playerID] = 0;
score[playerID] = 0;
zenKeshi[playerID] = false;
scgettime[playerID] = 0;
feverPoints[playerID] = 0;
feverTime[playerID] = feverTimeMin[playerID] * 60;
inFever[playerID] = false;
feverBackupField[playerID] = null;
cleared[playerID] = false;
ojamaDrop[playerID] = false;
if(engine.owner.replayMode == false) {
loadOtherSetting(engine, engine.owner.modeConfig);
loadPreset(engine, engine.owner.modeConfig, -1 - playerID);
version = CURRENT_VERSION;
} else {
loadOtherSetting(engine, engine.owner.replayProp);
loadPreset(engine, engine.owner.replayProp, -1 - playerID);
version = owner.replayProp.getProperty("avalanchevs.version", 0);
}
}
/*
* Settings screen processing
*/
@Override
public boolean onSetting(GameEngine engine, int playerID) {
// Menu
if((engine.owner.replayMode == false) && (engine.statc[4] == 0)) {
// Up
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_UP)) {
engine.statc[2]--;
if(engine.statc[2] < 0) engine.statc[2] = 24;
engine.playSE("cursor");
}
// Down
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_DOWN)) {
engine.statc[2]++;
if(engine.statc[2] > 24) engine.statc[2] = 0;
engine.playSE("cursor");
}
// Change setting values
int change = 0;
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_LEFT)) change = -1;
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_RIGHT)) change = 1;
if(change != 0) {
engine.playSE("change");
int m = 1;
if(engine.ctrl.isPress(Controller.BUTTON_E)) m = 100;
if(engine.ctrl.isPress(Controller.BUTTON_F)) m = 1000;
switch(engine.statc[2]) {
case 0:
engine.speed.gravity += change * m;
if(engine.speed.gravity < -1) engine.speed.gravity = 99999;
if(engine.speed.gravity > 99999) engine.speed.gravity = -1;
break;
case 1:
engine.speed.denominator += change * m;
if(engine.speed.denominator < -1) engine.speed.denominator = 99999;
if(engine.speed.denominator > 99999) engine.speed.denominator = -1;
break;
case 2:
engine.speed.are += change;
if(engine.speed.are < 0) engine.speed.are = 99;
if(engine.speed.are > 99) engine.speed.are = 0;
break;
case 3:
engine.speed.areLine += change;
if(engine.speed.areLine < 0) engine.speed.areLine = 99;
if(engine.speed.areLine > 99) engine.speed.areLine = 0;
break;
case 4:
engine.speed.lineDelay += change;
if(engine.speed.lineDelay < 0) engine.speed.lineDelay = 99;
if(engine.speed.lineDelay > 99) engine.speed.lineDelay = 0;
break;
case 5:
engine.speed.lockDelay += change;
if(engine.speed.lockDelay < 0) engine.speed.lockDelay = 99;
if(engine.speed.lockDelay > 99) engine.speed.lockDelay = 0;
break;
case 6:
engine.speed.das += change;
if(engine.speed.das < 0) engine.speed.das = 99;
if(engine.speed.das > 99) engine.speed.das = 0;
break;
case 7:
case 8:
presetNumber[playerID] += change;
if(presetNumber[playerID] < 0) presetNumber[playerID] = 99;
if(presetNumber[playerID] > 99) presetNumber[playerID] = 0;
break;
case 9:
ojamaCounterMode[playerID] += change;
if(ojamaCounterMode[playerID] < 0) ojamaCounterMode[playerID] = 2;
if(ojamaCounterMode[playerID] > 2) ojamaCounterMode[playerID] = 0;
break;
case 10:
maxAttack[playerID] += change;
if(maxAttack[playerID] < 0) maxAttack[playerID] = 99;
if(maxAttack[playerID] > 99) maxAttack[playerID] = 0;
break;
case 11:
numColors[playerID] += change;
if(numColors[playerID] < 3) numColors[playerID] = 5;
if(numColors[playerID] > 5) numColors[playerID] = 3;
break;
case 12:
rensaShibari[playerID] += change;
if(rensaShibari[playerID] < 1) rensaShibari[playerID] = 20;
if(rensaShibari[playerID] > 20) rensaShibari[playerID] = 1;
break;
case 13:
ojamaRate[playerID] += change*10;
if(ojamaRate[playerID] < 10) ojamaRate[playerID] = 1000;
if(ojamaRate[playerID] > 1000) ojamaRate[playerID] = 10;
break;
case 14:
big[playerID] = !big[playerID];
break;
case 15:
enableSE[playerID] = !enableSE[playerID];
break;
case 16:
hurryupSeconds[playerID] += change;
if(hurryupSeconds[playerID] < 0) hurryupSeconds[playerID] = 300;
if(hurryupSeconds[playerID] > 300) hurryupSeconds[playerID] = 0;
break;
case 17:
ojamaHard[playerID] += change;
if(ojamaHard[playerID] < 0) ojamaHard[playerID] = 9;
if(ojamaHard[playerID] > 9) ojamaHard[playerID] = 0;
break;
case 18:
bgmno += change;
if(bgmno < 0) bgmno = BGMStatus.BGM_COUNT - 1;
if(bgmno > BGMStatus.BGM_COUNT - 1) bgmno = 0;
break;
case 19:
useMap[playerID] = !useMap[playerID];
if(!useMap[playerID]) {
if(engine.field != null) engine.field.reset();
} else {
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
}
break;
case 20:
mapSet[playerID] += change;
if(mapSet[playerID] < 0) mapSet[playerID] = 99;
if(mapSet[playerID] > 99) mapSet[playerID] = 0;
if(useMap[playerID]) {
mapNumber[playerID] = -1;
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
}
break;
case 21:
if(useMap[playerID]) {
mapNumber[playerID] += change;
if(mapNumber[playerID] < -1) mapNumber[playerID] = mapMaxNo[playerID] - 1;
if(mapNumber[playerID] > mapMaxNo[playerID] - 1) mapNumber[playerID] = -1;
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
} else {
mapNumber[playerID] = -1;
}
break;
case 22:
feverThreshold[playerID] += change;
if(feverThreshold[playerID] < 0) feverThreshold[playerID] = 9;
if(feverThreshold[playerID] > 9) feverThreshold[playerID] = 0;
break;
case 23:
feverTimeMin[playerID] += change;
if(feverTimeMin[playerID] < 1) feverTimeMin[playerID] = feverTimeMax[playerID];
if(feverTimeMin[playerID] > feverTimeMax[playerID]) feverTimeMin[playerID] = 1;
break;
case 24:
feverTimeMax[playerID] += change;
if(feverTimeMax[playerID] < feverTimeMin[playerID]) feverTimeMax[playerID] = 99;
if(feverTimeMax[playerID] > 99) feverTimeMax[playerID] = feverTimeMin[playerID];
break;
}
}
// Confirm
if(engine.ctrl.isPush(Controller.BUTTON_A) && (engine.statc[3] >= 5)) {
engine.playSE("decide");
if(engine.statc[2] == 7) {
loadPreset(engine, owner.modeConfig, presetNumber[playerID]);
} else if(engine.statc[2] == 8) {
savePreset(engine, owner.modeConfig, presetNumber[playerID]);
receiver.saveModeConfig(owner.modeConfig);
} else {
saveOtherSetting(engine, owner.modeConfig);
savePreset(engine, owner.modeConfig, -1 - playerID);
receiver.saveModeConfig(owner.modeConfig);
engine.statc[4] = 1;
}
}
// Cancel
if(engine.ctrl.isPush(Controller.BUTTON_B)) {
engine.quitflag = true;
}
// Load map for preview
if(useMap[playerID] && (engine.statc[3] == 0)) {
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
}
// Random map preview
if(useMap[playerID] && (propMap[playerID] != null) && (mapNumber[playerID] < 0)) {
if(engine.statc[3] % 30 == 0) {
engine.statc[5]++;
if(engine.statc[5] >= mapMaxNo[playerID]) engine.statc[5] = 0;
loadMapPreview(engine, playerID, engine.statc[5], false);
}
}
engine.statc[3]++;
} else if(engine.statc[4] == 0) {
engine.statc[3]++;
engine.statc[2] = 0;
if(engine.statc[3] >= 60) {
engine.statc[2] = 9;
}
if(engine.statc[3] >= 120) {
engine.statc[4] = 1;
}
} else {
// Start the game
if((owner.engine[0].statc[4] == 1) && (owner.engine[1].statc[4] == 1) && (playerID == 1)) {
owner.engine[0].stat = GameEngine.STAT_READY;
owner.engine[1].stat = GameEngine.STAT_READY;
owner.engine[0].resetStatc();
owner.engine[1].resetStatc();
}
// Cancel
else if(engine.ctrl.isPush(Controller.BUTTON_B)) {
engine.statc[4] = 0;
}
}
return true;
}
/*
* Settings screen drawing
*/
@Override
public void renderSetting(GameEngine engine, int playerID) {
if(engine.statc[4] == 0) {
if(engine.statc[2] < 9) {
if(owner.replayMode == false) {
receiver.drawMenuFont(engine, playerID, 0, (engine.statc[2] * 2) + 1, "b",
(playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE);
}
receiver.drawMenuFont(engine, playerID, 0, 0, "GRAVITY", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 1, String.valueOf(engine.speed.gravity), (engine.statc[2] == 0));
receiver.drawMenuFont(engine, playerID, 0, 2, "G-MAX", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 3, String.valueOf(engine.speed.denominator), (engine.statc[2] == 1));
receiver.drawMenuFont(engine, playerID, 0, 4, "ARE", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 5, String.valueOf(engine.speed.are), (engine.statc[2] == 2));
receiver.drawMenuFont(engine, playerID, 0, 6, "ARE LINE", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 7, String.valueOf(engine.speed.areLine), (engine.statc[2] == 3));
receiver.drawMenuFont(engine, playerID, 0, 8, "LINE DELAY", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 9, String.valueOf(engine.speed.lineDelay), (engine.statc[2] == 4));
receiver.drawMenuFont(engine, playerID, 0, 10, "LOCK DELAY", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 11, String.valueOf(engine.speed.lockDelay), (engine.statc[2] == 5));
receiver.drawMenuFont(engine, playerID, 0, 12, "DAS", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 13, String.valueOf(engine.speed.das), (engine.statc[2] == 6));
receiver.drawMenuFont(engine, playerID, 0, 14, "LOAD", EventReceiver.COLOR_GREEN);
receiver.drawMenuFont(engine, playerID, 1, 15, String.valueOf(presetNumber[playerID]), (engine.statc[2] == 7));
receiver.drawMenuFont(engine, playerID, 0, 16, "SAVE", EventReceiver.COLOR_GREEN);
receiver.drawMenuFont(engine, playerID, 1, 17, String.valueOf(presetNumber[playerID]), (engine.statc[2] == 8));
} else if(engine.statc[2] < 19) {
if(owner.replayMode == false) {
receiver.drawMenuFont(engine, playerID, 0, ((engine.statc[2] - 9) * 2) + 1, "b",
(playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE);
}
receiver.drawMenuFont(engine, playerID, 0, 0, "COUNTER", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 1, OJAMA_COUNTER_STRING[ojamaCounterMode[playerID]], (engine.statc[2] == 9));
receiver.drawMenuFont(engine, playerID, 0, 2, "MAX ATTACK", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 3, String.valueOf(maxAttack[playerID]), (engine.statc[2] == 10));
receiver.drawMenuFont(engine, playerID, 0, 4, "COLORS", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 5, String.valueOf(numColors[playerID]), (engine.statc[2] == 11));
receiver.drawMenuFont(engine, playerID, 0, 6, "MIN CHAIN", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 7, String.valueOf(rensaShibari[playerID]), (engine.statc[2] == 12));
receiver.drawMenuFont(engine, playerID, 0, 8, "OJAMA RATE", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 9, String.valueOf(ojamaRate[playerID]), (engine.statc[2] == 13));
receiver.drawMenuFont(engine, playerID, 0, 10, "BIG", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 11, GeneralUtil.getONorOFF(big[playerID]), (engine.statc[2] == 14));
receiver.drawMenuFont(engine, playerID, 0, 12, "SE", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 13, GeneralUtil.getONorOFF(enableSE[playerID]), (engine.statc[2] == 15));
receiver.drawMenuFont(engine, playerID, 0, 14, "HURRYUP", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 15, (hurryupSeconds[playerID] == 0) ? "NONE" : hurryupSeconds[playerID]+"SEC",
(engine.statc[2] == 16));
receiver.drawMenuFont(engine, playerID, 0, 16, "HARD OJAMA", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 17, String.valueOf(ojamaHard[playerID]), (engine.statc[2] == 17));
receiver.drawMenuFont(engine, playerID, 0, 18, "BGM", EventReceiver.COLOR_PINK);
receiver.drawMenuFont(engine, playerID, 1, 19, String.valueOf(bgmno), (engine.statc[2] == 18));
} else {
if(owner.replayMode == false) {
receiver.drawMenuFont(engine, playerID, 0, ((engine.statc[2] - 19) * 2) + 1, "b",
(playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE);
}
receiver.drawMenuFont(engine, playerID, 0, 0, "USE MAP", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 1, GeneralUtil.getONorOFF(useMap[playerID]), (engine.statc[2] == 19));
receiver.drawMenuFont(engine, playerID, 0, 2, "MAP SET", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 3, String.valueOf(mapSet[playerID]), (engine.statc[2] == 20));
receiver.drawMenuFont(engine, playerID, 0, 4, "MAP NO.", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 5, (mapNumber[playerID] < 0) ? "RANDOM" : mapNumber[playerID]+"/"+(mapMaxNo[playerID]-1),
(engine.statc[2] == 21));
receiver.drawMenuFont(engine, playerID, 0, 6, "FEVER", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 7, (feverThreshold[playerID] == 0) ? "NONE" : feverThreshold[playerID]+" PTS",
(engine.statc[2] == 22));
receiver.drawMenuFont(engine, playerID, 0, 8, "F-MIN TIME", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 9, feverTimeMin[playerID] + "SEC", (engine.statc[2] == 23));
receiver.drawMenuFont(engine, playerID, 0, 10, "F-MAX TIME", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 11, feverTimeMax[playerID] + "SEC", (engine.statc[2] == 24));
}
} else {
receiver.drawMenuFont(engine, playerID, 3, 10, "WAIT", EventReceiver.COLOR_YELLOW);
}
}
/*
* Initialization at the Ready screen (before engine initialization)
*/
@Override
public boolean onReady(GameEngine engine, int playerID) {
if(engine.statc[0] == 0) {
engine.numColors = numColors[playerID];
feverTime[playerID] = feverTimeMin[playerID] * 60;
// Load the map and back it up for saving into the replay
if(useMap[playerID]) {
if(owner.replayMode) {
engine.createFieldIfNeeded();
loadMap(engine.field, owner.replayProp, playerID);
engine.field.setAllSkin(engine.getSkin());
} else {
if(propMap[playerID] == null) {
propMap[playerID] = receiver.loadProperties("config/map/vsbattle/" + mapSet[playerID] + ".map");
}
if(propMap[playerID] != null) {
engine.createFieldIfNeeded();
if(mapNumber[playerID] < 0) {
if((playerID == 1) && (useMap[0]) && (mapNumber[0] < 0)) {
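// When both players use a random map, player 2 copies player 1's field so both start on the same map.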
engine.field.copy(owner.engine[0].field);
} else {
int no = (mapMaxNo[playerID] < 1) ? 0 : randMap.nextInt(mapMaxNo[playerID]);
loadMap(engine.field, propMap[playerID], no);
}
} else {
loadMap(engine.field, propMap[playerID], mapNumber[playerID]);
}
engine.field.setAllSkin(engine.getSkin());
fldBackup[playerID] = new Field(engine.field);
}
}
} else if(engine.field != null) {
engine.field.reset();
}
}
return false;
}
/*
* Processing at game start
*/
@Override
public void startGame(GameEngine engine, int playerID) {
engine.b2bEnable = false;
engine.comboType = GameEngine.COMBO_TYPE_DISABLE;
engine.big = big[playerID];
engine.enableSE = enableSE[playerID];
if(playerID == 1) owner.bgmStatus.bgm = bgmno;
engine.colorClearSize = big[playerID] ? 12 : 4;
engine.tspinAllowKick = false;
engine.tspinEnable = false;
engine.useAllSpinBonus = false;
}
/*
* Score display
*/
@Override
public void renderLast(GameEngine engine, int playerID) {
// Status display
if(playerID == 0) {
receiver.drawScoreFont(engine, playerID, -1, 0, "AVALANCHE VS", EventReceiver.COLOR_GREEN);
receiver.drawScoreFont(engine, playerID, -1, 2, "OJAMA", EventReceiver.COLOR_PURPLE);
String ojamaStr1P = String.valueOf(ojama[0]);
if (ojamaAdd[0] > 0 && !(inFever[0] && ojamaAddToFever[0]))
ojamaStr1P = ojamaStr1P + "(+" + String.valueOf(ojamaAdd[0]) + ")";
String ojamaStr2P = String.valueOf(ojama[1]);
if (ojamaAdd[1] > 0 && !(inFever[1] && ojamaAddToFever[1]))
ojamaStr2P = ojamaStr2P + "(+" + String.valueOf(ojamaAdd[1]) + ")";
receiver.drawScoreFont(engine, playerID, -1, 3, "1P:", EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, 3, 3, ojamaStr1P, (ojama[0] > 0));
receiver.drawScoreFont(engine, playerID, -1, 4, "2P:", EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, 3, 4, ojamaStr2P, (ojama[1] > 0));
receiver.drawScoreFont(engine, playerID, -1, 6, "ATTACK", EventReceiver.COLOR_GREEN);
receiver.drawScoreFont(engine, playerID, -1, 7, "1P: " + String.valueOf(ojamaSent[0]), EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, -1, 8, "2P: " + String.valueOf(ojamaSent[1]), EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, -1, 10, "SCORE", EventReceiver.COLOR_PURPLE);
receiver.drawScoreFont(engine, playerID, -1, 11, "1P: " + String.valueOf(score[0]), EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, -1, 12, "2P: " + String.valueOf(score[1]), EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, -1, 14, "TIME", EventReceiver.COLOR_GREEN);
receiver.drawScoreFont(engine, playerID, -1, 15, GeneralUtil.getTime(engine.statistics.time));
if (inFever[0] || inFever[1])
{
receiver.drawScoreFont(engine, playerID, -1, 17, "FEVER OJAMA", EventReceiver.COLOR_PURPLE);
String ojamaFeverStr1P = String.valueOf(ojamaFever[0]);
if (ojamaAdd[0] > 0 && inFever[0] && ojamaAddToFever[0])
ojamaFeverStr1P = ojamaFeverStr1P + "(+" + String.valueOf(ojamaAdd[0]) + ")";
String ojamaFeverStr2P = String.valueOf(ojamaFever[1]);
if (ojamaAdd[1] > 0 && inFever[1] && ojamaAddToFever[1])
ojamaFeverStr2P = ojamaFeverStr2P + "(+" + String.valueOf(ojamaAdd[1]) + ")";
receiver.drawScoreFont(engine, playerID, -1, 18, "1P:", EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, 3, 18, ojamaFeverStr1P, (ojamaFever[0] > 0));
receiver.drawScoreFont(engine, playerID, -1, 19, "2P:", EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, 3, 19, ojamaFeverStr2P, (ojamaFever[1] > 0));
}
}
if (!owner.engine[playerID].gameActive)
return;
int playerColor = (playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE;
if (feverThreshold[playerID] > 0)
{
receiver.drawMenuFont(engine, playerID, 0, 17, "FEVER POINT", playerColor);
receiver.drawMenuFont(engine, playerID, 0, 18, feverPoints[playerID] + " / " + feverThreshold[playerID], inFever[playerID]);
receiver.drawMenuFont(engine, playerID, 0, 19, "FEVER TIME", playerColor);
receiver.drawMenuFont(engine, playerID, 0, 20, GeneralUtil.getTime(feverTime[playerID]), inFever[playerID]);
}
if(zenKeshi[playerID])
receiver.drawMenuFont(engine, playerID, 1, 21, "ZENKESHI!", EventReceiver.COLOR_YELLOW);
if (ojamaHard[playerID] > 0 && engine.field != null)
for (int x = 0; x < engine.field.getWidth(); x++)
for (int y = 0; y < engine.field.getHeight(); y++)
{
int hard = engine.field.getBlock(x, y).hard;
if (hard > 0)
receiver.drawMenuFont(engine, playerID, x, y, String.valueOf(hard), EventReceiver.COLOR_YELLOW);
}
}
/*
* Score calculation
*/
@Override
public void calcScore(GameEngine engine, int playerID, int avalanche) {
int enemyID = 0;
if(playerID == 0) enemyID = 1;
if (big[playerID])
avalanche >>= 2;
// Line clear bonus
int pts = avalanche*10;
int ojamaNew = 0;
if (avalanche > 0) {
cleared[playerID] = true;
if (zenKeshi[playerID])
ojamaNew += 30;
if (engine.field.isEmpty()) {
engine.playSE("bravo");
zenKeshi[playerID] = true;
engine.statistics.score += 2100;
}
else
zenKeshi[playerID] = false;
int chain = engine.chain;
engine.playSE("combo" + Math.min(chain, 20));
if (chain == 1)
ojamaAddToFever[enemyID] = inFever[enemyID];
int multiplier = engine.field.colorClearExtraCount;
if (big[playerID])
multiplier >>= 2;
if (engine.field.colorsCleared > 1)
multiplier += (engine.field.colorsCleared-1)*2;
/*
if (multiplier < 0)
multiplier = 0;
if (chain == 0)
firstExtra = avalanche > engine.colorClearSize;
*/
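// Chain bonus: +8 on the 2nd link, +16 on the 3rd, then +32 per additional link from the 4th onward; the multiplier is clamped to 1-999.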
if (chain == 2)
multiplier += 8;
else if (chain == 3)
multiplier += 16;
else if (chain >= 4)
multiplier += 32*(chain-3);
/*
if (firstExtra)
multiplier++;
*/
if (multiplier > 999)
multiplier = 999;
if (multiplier < 1)
multiplier = 1;
lastscore[playerID] = pts;
lastmultiplier[playerID] = multiplier;
scgettime[playerID] = 120;
int ptsTotal = pts*multiplier;
score[playerID] += ptsTotal;
if (hurryupSeconds[playerID] > 0 && engine.statistics.time > hurryupSeconds[playerID])
ptsTotal <<= engine.statistics.time / (hurryupSeconds[playerID] * 60);
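// Convert points to ojama blocks at ojamaRate points per block, rounding up.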
ojamaNew += (ptsTotal+ojamaRate[playerID]-1)/ojamaRate[playerID];
if (chain >= rensaShibari[playerID])
{
ojamaSent[playerID] += ojamaNew;
if (ojamaCounterMode[playerID] != OJAMA_COUNTER_OFF)
{
boolean countered = false;
if (inFever[playerID])
{
if (ojamaFever[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojamaFever[playerID], ojamaNew);
ojamaFever[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
if (ojamaAdd[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojamaAdd[playerID], ojamaNew);
ojamaAdd[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
}
if (ojama[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojama[playerID], ojamaNew);
ojama[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
if (ojamaAdd[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojamaAdd[playerID], ojamaNew);
ojamaAdd[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
if (countered)
{
if (feverThreshold[playerID] > 0 && feverThreshold[playerID] > feverPoints[playerID])
feverPoints[playerID]++;
if (feverThreshold[enemyID] > 0 && !inFever[enemyID])
feverTime[enemyID] = Math.min(feverTime[enemyID]+60,feverTimeMax[enemyID]*60);
}
}
if (ojamaNew > 0)
ojamaAdd[enemyID] += ojamaNew;
}
}
else if (!engine.field.canCascade())
cleared[playerID] = false;
}
public boolean lineClearEnd(GameEngine engine, int playerID) {
int enemyID = 0;
if(playerID == 0) enemyID = 1;
if (ojamaAdd[enemyID] > 0)
{
if (ojamaAddToFever[enemyID] && inFever[enemyID])
ojamaFever[enemyID] += ojamaAdd[enemyID];
else
ojama[enemyID] += ojamaAdd[enemyID];
ojamaAdd[enemyID] = 0;
}
checkFeverEnd(engine, playerID);
int ojamaNow = inFever[playerID] ? ojamaFever[playerID] : ojama[playerID];
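// Drop pending ojama once per piece, capped at maxAttack; under the FEVER counter rule a clear on this piece postpones the drop.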
if (ojamaNow > 0 && !ojamaDrop[playerID] &&
(!cleared[playerID] || ojamaCounterMode[playerID] != OJAMA_COUNTER_FEVER))
{
ojamaDrop[playerID] = true;
int drop = Math.min(ojamaNow, maxAttack[playerID]);
if (inFever[playerID])
ojamaFever[playerID] -= drop;
else
ojama[playerID] -= drop;
engine.field.garbageDrop(engine, drop, big[playerID], ojamaHard[playerID]);
return true;
}
checkFeverStart(engine, playerID);
return false;
}
private void checkFeverStart(GameEngine engine, int playerID)
{
if (!inFever[playerID] && feverPoints[playerID] >= feverThreshold[playerID] && feverThreshold[playerID] > 0)
{
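// Entering Fever Mode: back up the current field and start on a fresh one; the backup is restored when fever ends.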
inFever[playerID] = true;
feverBackupField[playerID] = engine.field;
engine.field = null;
engine.createFieldIfNeeded();
//TODO: Add preset chain.
}
}
private void checkFeverEnd(GameEngine engine, int playerID)
{
if (inFever[playerID] && feverTime[playerID] == 0)
{
inFever[playerID] = false;
feverTime[playerID] = feverTimeMin[playerID] * 60;
feverPoints[playerID] = 0;
engine.field = feverBackupField[playerID];
ojama[playerID] += ojamaFever[playerID];
ojamaFever[playerID] = 0;
ojamaAddToFever[playerID] = false;
}
}
/*
* Processing at the end of each frame
*/
@Override
public void onLast(GameEngine engine, int playerID) {
scgettime[playerID]++;
if (inFever[playerID] && feverTime[playerID] > 0)
{
if (feverTime[playerID] == 1)
engine.playSE("levelstop");
feverTime[playerID]--;
}
if (engine.stat == GameEngine.STAT_MOVE)
ojamaDrop[playerID] = false;
int width = 1;
if (engine.field != null)
width = engine.field.getWidth();
int blockHeight = receiver.getBlockGraphicsHeight(engine, playerID);
// Garbage rise meter
if(ojama[playerID] * blockHeight / width > engine.meterValue) {
engine.meterValue++;
} else if(ojama[playerID] * blockHeight / width < engine.meterValue) {
engine.meterValue--;
}
if(ojama[playerID] >= 5*width) engine.meterColor = GameEngine.METER_COLOR_RED;
else if(ojama[playerID] >= width) engine.meterColor = GameEngine.METER_COLOR_ORANGE;
else if(ojama[playerID] >= 1) engine.meterColor = GameEngine.METER_COLOR_YELLOW;
else engine.meterColor = GameEngine.METER_COLOR_GREEN;
// Determine the match outcome
if((playerID == 1) && (owner.engine[0].gameActive)) {
if((owner.engine[0].stat == GameEngine.STAT_GAMEOVER) && (owner.engine[1].stat == GameEngine.STAT_GAMEOVER)) {
// Draw
winnerID = -1;
owner.engine[0].gameActive = false;
owner.engine[1].gameActive = false;
owner.bgmStatus.bgm = BGMStatus.BGM_NOTHING;
} else if((owner.engine[0].stat != GameEngine.STAT_GAMEOVER) && (owner.engine[1].stat == GameEngine.STAT_GAMEOVER)) {
// 1P wins
winnerID = 0;
owner.engine[0].gameActive = false;
owner.engine[1].gameActive = false;
owner.engine[0].stat = GameEngine.STAT_EXCELLENT;
owner.engine[0].resetStatc();
owner.engine[0].statc[1] = 1;
owner.bgmStatus.bgm = BGMStatus.BGM_NOTHING;
} else if((owner.engine[0].stat == GameEngine.STAT_GAMEOVER) && (owner.engine[1].stat != GameEngine.STAT_GAMEOVER)) {
// 2P wins
winnerID = 1;
owner.engine[0].gameActive = false;
owner.engine[1].gameActive = false;
owner.engine[1].stat = GameEngine.STAT_EXCELLENT;
owner.engine[1].resetStatc();
owner.engine[1].statc[1] = 1;
owner.bgmStatus.bgm = BGMStatus.BGM_NOTHING;
}
}
}
/*
* Results screen drawing
*/
@Override
public void renderResult(GameEngine engine, int playerID) {
receiver.drawMenuFont(engine, playerID, 0, 1, "RESULT", EventReceiver.COLOR_ORANGE);
if(winnerID == -1) {
receiver.drawMenuFont(engine, playerID, 6, 2, "DRAW", EventReceiver.COLOR_GREEN);
} else if(winnerID == playerID) {
receiver.drawMenuFont(engine, playerID, 6, 2, "WIN!", EventReceiver.COLOR_YELLOW);
} else {
receiver.drawMenuFont(engine, playerID, 6, 2, "LOSE", EventReceiver.COLOR_WHITE);
}
receiver.drawMenuFont(engine, playerID, 0, 3, "ATTACK", EventReceiver.COLOR_ORANGE);
String strScore = String.format("%10d", ojamaSent[playerID]);
receiver.drawMenuFont(engine, playerID, 0, 4, strScore);
receiver.drawMenuFont(engine, playerID, 0, 5, "LINE", EventReceiver.COLOR_ORANGE);
String strLines = String.format("%10d", engine.statistics.lines);
receiver.drawMenuFont(engine, playerID, 0, 6, strLines);
receiver.drawMenuFont(engine, playerID, 0, 7, "PIECE", EventReceiver.COLOR_ORANGE);
String strPiece = String.format("%10d", engine.statistics.totalPieceLocked);
receiver.drawMenuFont(engine, playerID, 0, 8, strPiece);
receiver.drawMenuFont(engine, playerID, 0, 9, "ATTACK/MIN", EventReceiver.COLOR_ORANGE);
float apm = (float)(ojamaSent[playerID] * 3600) / (float)(engine.statistics.time);
String strAPM = String.format("%10g", apm);
receiver.drawMenuFont(engine, playerID, 0, 10, strAPM);
receiver.drawMenuFont(engine, playerID, 0, 11, "LINE/MIN", EventReceiver.COLOR_ORANGE);
String strLPM = String.format("%10g", engine.statistics.lpm);
receiver.drawMenuFont(engine, playerID, 0, 12, strLPM);
receiver.drawMenuFont(engine, playerID, 0, 13, "PIECE/SEC", EventReceiver.COLOR_ORANGE);
String strPPS = String.format("%10g", engine.statistics.pps);
receiver.drawMenuFont(engine, playerID, 0, 14, strPPS);
receiver.drawMenuFont(engine, playerID, 0, 15, "TIME", EventReceiver.COLOR_ORANGE);
String strTime = String.format("%10s", GeneralUtil.getTime(owner.engine[0].statistics.time));
receiver.drawMenuFont(engine, playerID, 0, 16, strTime);
}
/*
* Processing when saving a replay
*/
@Override
public void saveReplay(GameEngine engine, int playerID, CustomProperties prop) {
saveOtherSetting(engine, owner.replayProp);
savePreset(engine, owner.replayProp, -1 - playerID);
if(useMap[playerID] && (fldBackup[playerID] != null)) {
saveMap(fldBackup[playerID], owner.replayProp, playerID);
}
owner.replayProp.setProperty("avalanchevs.version", version);
}
}
|
src/org/game_host/hebo/nullpomino/game/subsystem/mode/AvalancheVSMode.java
|
/*
Copyright (c) 2010, NullNoname
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of NullNoname nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package org.game_host.hebo.nullpomino.game.subsystem.mode;
import java.util.Random;
import org.game_host.hebo.nullpomino.game.component.BGMStatus;
import org.game_host.hebo.nullpomino.game.component.Block;
import org.game_host.hebo.nullpomino.game.component.Controller;
import org.game_host.hebo.nullpomino.game.component.Field;
import org.game_host.hebo.nullpomino.game.component.Piece;
import org.game_host.hebo.nullpomino.game.event.EventReceiver;
import org.game_host.hebo.nullpomino.game.play.GameEngine;
import org.game_host.hebo.nullpomino.game.play.GameManager;
import org.game_host.hebo.nullpomino.util.CustomProperties;
import org.game_host.hebo.nullpomino.util.GeneralUtil;
/**
* AVALANCHE VS-BATTLE mode (beta)
*/
public class AvalancheVSMode extends DummyMode {
/** Current version */
private static final int CURRENT_VERSION = 0;
/** Enabled piece types */
private static final int[] PIECE_ENABLE = {0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0};
/** Block colors */
private static final int[] BLOCK_COLORS =
{
Block.BLOCK_COLOR_RED,
Block.BLOCK_COLOR_GREEN,
Block.BLOCK_COLOR_BLUE,
Block.BLOCK_COLOR_YELLOW,
Block.BLOCK_COLOR_PURPLE
};
/** Number of players */
private static final int MAX_PLAYERS = 2;
/** Names of ojama counter setting constants */
private final int OJAMA_COUNTER_OFF = 0, OJAMA_COUNTER_ON = 1, OJAMA_COUNTER_FEVER = 2;
/** Display names of the ojama block types */
//private final String[] OJAMA_TYPE_STRING = {"NORMAL", "ONE RISE", "1-ATTACK"};
/** Names of ojama counter settings */
private final String[] OJAMA_COUNTER_STRING = {"OFF", "ON", "FEVER"};
/** Frame color for each player */
private final int[] PLAYER_COLOR_FRAME = {GameEngine.FRAME_COLOR_RED, GameEngine.FRAME_COLOR_BLUE};
/** GameManager that owns this mode */
private GameManager owner;
/** EventReceiver that handles drawing and other events */
private EventReceiver receiver;
/** Rule settings for countering ojama not yet dropped */
private int[] ojamaCounterMode;
/** Number of pending ojama blocks */
private int[] ojama;
/** Number of ojama blocks sent */
private int[] ojamaSent;
/** Time elapsed since the last score gain */
private int[] scgettime;
/** BGM number to use */
private int bgmno;
/** Big mode */
private boolean[] big;
/** Sound effects ON/OFF */
private boolean[] enableSE;
/** Flag for using a map */
private boolean[] useMap;
/** Map set number to use */
private int[] mapSet;
/** Map number (-1 for random) */
private int[] mapNumber;
/** Last used preset number */
private int[] presetNumber;
/** Winner ID */
private int winnerID;
/** Property files for the map sets */
private CustomProperties[] propMap;
/** Maximum map number */
private int[] mapMaxNo;
/** Backup fields (used when saving the map to a replay) */
private Field[] fldBackup;
/** Random number generator for map selection */
private Random randMap;
/** Version */
private int version;
/** Flag for all clear */
private boolean[] zenKeshi;
/** Amount of points earned from most recent clear */
private int[] lastscore, lastmultiplier;
/** Amount of ojama added in current chain */
private int[] ojamaAdd;
/** Score */
private int[] score;
/** Max amount of ojama dropped at once */
private int[] maxAttack;
/** Number of colors to use */
private int[] numColors;
/** Minimum chain count needed to send ojama */
private int[] rensaShibari;
/** Denominator for score-to-ojama conversion */
private int[] ojamaRate;
/** Settings for hard ojama blocks */
private int[] ojamaHard;
/** Seconds until Hurryup starts (0 = no Hurryup) */
private int[] hurryupSeconds;
/** Fever points needed to enter Fever Mode */
private int[] feverThreshold;
/** Fever points */
private int[] feverPoints;
/** Fever time */
private int[] feverTime;
/** Minimum and maximum fever time */
private int[] feverTimeMin, feverTimeMax;
/** Flag set to true when player is in Fever Mode */
private boolean[] inFever;
/** Backup fields for Fever Mode */
private Field[] feverBackupField;
/** Second ojama counter for Fever Mode */
private int[] ojamaFever;
/** Set to true when opponent starts chain while in Fever Mode */
private boolean[] ojamaAddToFever;
/** Set to true when last drop resulted in a clear */
private boolean[] cleared;
/** Set to true when dropping ojama blocks */
private boolean[] ojamaDrop;
/*
* Mode name
*/
@Override
public String getName() {
return "AVALANCHE VS-BATTLE (BETA)";
}
/*
* Number of players
*/
@Override
public int getPlayers() {
return MAX_PLAYERS;
}
/*
* Mode initialization
*/
@Override
public void modeInit(GameManager manager) {
owner = manager;
receiver = owner.receiver;
ojamaCounterMode = new int[MAX_PLAYERS];
ojama = new int[MAX_PLAYERS];
ojamaSent = new int[MAX_PLAYERS];
scgettime = new int[MAX_PLAYERS];
bgmno = 0;
big = new boolean[MAX_PLAYERS];
enableSE = new boolean[MAX_PLAYERS];
hurryupSeconds = new int[MAX_PLAYERS];
useMap = new boolean[MAX_PLAYERS];
mapSet = new int[MAX_PLAYERS];
mapNumber = new int[MAX_PLAYERS];
presetNumber = new int[MAX_PLAYERS];
propMap = new CustomProperties[MAX_PLAYERS];
mapMaxNo = new int[MAX_PLAYERS];
fldBackup = new Field[MAX_PLAYERS];
randMap = new Random();
zenKeshi = new boolean[MAX_PLAYERS];
lastscore = new int[MAX_PLAYERS];
lastmultiplier = new int[MAX_PLAYERS];
ojamaAdd = new int[MAX_PLAYERS];
score = new int[MAX_PLAYERS];
numColors = new int[MAX_PLAYERS];
maxAttack = new int[MAX_PLAYERS];
rensaShibari = new int[MAX_PLAYERS];
ojamaRate = new int[MAX_PLAYERS];
ojamaHard = new int[MAX_PLAYERS];
feverThreshold = new int[MAX_PLAYERS];
feverPoints = new int[MAX_PLAYERS];
feverTime = new int[MAX_PLAYERS];
feverTimeMin = new int[MAX_PLAYERS];
feverTimeMax = new int[MAX_PLAYERS];
inFever = new boolean[MAX_PLAYERS];
feverBackupField = new Field[MAX_PLAYERS];
ojamaFever = new int[MAX_PLAYERS];
ojamaAddToFever = new boolean[MAX_PLAYERS];
cleared = new boolean[MAX_PLAYERS];
ojamaDrop = new boolean[MAX_PLAYERS];
winnerID = -1;
}
/**
* Load a speed preset
* @param engine GameEngine
* @param prop Property file to read from
* @param preset Preset number
*/
private void loadPreset(GameEngine engine, CustomProperties prop, int preset) {
engine.speed.gravity = prop.getProperty("avalanchevs.gravity." + preset, 4);
engine.speed.denominator = prop.getProperty("avalanchevs.denominator." + preset, 256);
engine.speed.are = prop.getProperty("avalanchevs.are." + preset, 24);
engine.speed.areLine = prop.getProperty("avalanchevs.areLine." + preset, 24);
engine.speed.lineDelay = prop.getProperty("avalanchevs.lineDelay." + preset, 10);
engine.speed.lockDelay = prop.getProperty("avalanchevs.lockDelay." + preset, 30);
engine.speed.das = prop.getProperty("avalanchevs.das." + preset, 14);
}
/**
* Save a speed preset
* @param engine GameEngine
* @param prop Property file to save to
* @param preset Preset number
*/
private void savePreset(GameEngine engine, CustomProperties prop, int preset) {
prop.setProperty("avalanchevs.gravity." + preset, engine.speed.gravity);
prop.setProperty("avalanchevs.denominator." + preset, engine.speed.denominator);
prop.setProperty("avalanchevs.are." + preset, engine.speed.are);
prop.setProperty("avalanchevs.areLine." + preset, engine.speed.areLine);
prop.setProperty("avalanchevs.lineDelay." + preset, engine.speed.lineDelay);
prop.setProperty("avalanchevs.lockDelay." + preset, engine.speed.lockDelay);
prop.setProperty("avalanchevs.das." + preset, engine.speed.das);
}
/**
* Load settings other than speed
* @param engine GameEngine
* @param prop Property file to read from
*/
private void loadOtherSetting(GameEngine engine, CustomProperties prop) {
int playerID = engine.playerID;
bgmno = prop.getProperty("avalanchevs.bgmno", 0);
ojamaCounterMode[playerID] = prop.getProperty("avalanchevs.ojamaCounterMode", OJAMA_COUNTER_ON);
big[playerID] = prop.getProperty("avalanchevs.big.p" + playerID, false);
enableSE[playerID] = prop.getProperty("avalanchevs.enableSE.p" + playerID, true);
hurryupSeconds[playerID] = prop.getProperty("vsbattle.hurryupSeconds.p" + playerID, 192);
useMap[playerID] = prop.getProperty("avalanchevs.useMap.p" + playerID, false);
mapSet[playerID] = prop.getProperty("avalanchevs.mapSet.p" + playerID, 0);
mapNumber[playerID] = prop.getProperty("avalanchevs.mapNumber.p" + playerID, -1);
presetNumber[playerID] = prop.getProperty("avalanchevs.presetNumber.p" + playerID, 0);
maxAttack[playerID] = prop.getProperty("avalanchevs.maxAttack.p" + playerID, 30);
numColors[playerID] = prop.getProperty("avalanchevs.numColors.p" + playerID, 5);
rensaShibari[playerID] = prop.getProperty("avalanchevs.rensaShibari.p" + playerID, 1);
ojamaRate[playerID] = prop.getProperty("avalanchevs.ojamaRate.p" + playerID, 120);
ojamaHard[playerID] = prop.getProperty("avalanchevs.ojamaHard.p" + playerID, 0);
feverThreshold[playerID] = prop.getProperty("avalanchevs.feverThreshold.p" + playerID, 0);
feverTimeMin[playerID] = prop.getProperty("avalanchevs.feverTimeMin.p" + playerID, 15);
feverTimeMax[playerID] = prop.getProperty("avalanchevs.feverTimeMax.p" + playerID, 30);
}
/**
* Save settings other than speed
* @param engine GameEngine
* @param prop Property file to save to
*/
private void saveOtherSetting(GameEngine engine, CustomProperties prop) {
int playerID = engine.playerID;
prop.setProperty("avalanchevs.bgmno", bgmno);
prop.setProperty("avalanchevs.ojamaCounterMode", ojamaCounterMode[playerID]);
prop.setProperty("avalanchevs.big.p" + playerID, big[playerID]);
prop.setProperty("avalanchevs.enableSE.p" + playerID, enableSE[playerID]);
prop.setProperty("vsbattle.hurryupSeconds.p" + playerID, hurryupSeconds[playerID]);
prop.setProperty("avalanchevs.useMap.p" + playerID, useMap[playerID]);
prop.setProperty("avalanchevs.mapSet.p" + playerID, mapSet[playerID]);
prop.setProperty("avalanchevs.mapNumber.p" + playerID, mapNumber[playerID]);
prop.setProperty("avalanchevs.presetNumber.p" + playerID, presetNumber[playerID]);
prop.setProperty("avalanchevs.maxAttack.p" + playerID, maxAttack[playerID]);
prop.setProperty("avalanchevs.numColors.p" + playerID, numColors[playerID]);
prop.setProperty("avalanchevs.rensaShibari.p" + playerID, rensaShibari[playerID]);
prop.setProperty("avalanchevs.ojamaRate.p" + playerID, ojamaRate[playerID]);
prop.setProperty("avalanchevs.ojamaHard.p" + playerID, ojamaHard[playerID]);
prop.setProperty("avalanchevs.feverThreshold.p" + playerID, feverThreshold[playerID]);
}
/**
* Load a map
* @param field Field
* @param prop Property file to read from
* @param id Arbitrary ID
*/
private void loadMap(Field field, CustomProperties prop, int id) {
field.reset();
//field.readProperty(prop, id);
field.stringToField(prop.getProperty("map." + id, ""));
field.setAllAttribute(Block.BLOCK_ATTRIBUTE_VISIBLE, true);
field.setAllAttribute(Block.BLOCK_ATTRIBUTE_OUTLINE, true);
field.setAllAttribute(Block.BLOCK_ATTRIBUTE_SELFPLACED, false);
}
/**
* Save a map
* @param field Field
* @param prop Property file to save to
* @param id Arbitrary ID
*/
private void saveMap(Field field, CustomProperties prop, int id) {
//field.writeProperty(prop, id);
prop.setProperty("map." + id, field.fieldToString());
}
/**
* Load a map for preview
* @param engine GameEngine
* @param playerID Player number
* @param id Map ID
* @param forceReload When true, forces the map file to be reloaded
*/
private void loadMapPreview(GameEngine engine, int playerID, int id, boolean forceReload) {
if((propMap[playerID] == null) || (forceReload)) {
mapMaxNo[playerID] = 0;
propMap[playerID] = receiver.loadProperties("config/map/vsbattle/" + mapSet[playerID] + ".map");
}
if((propMap[playerID] == null) && (engine.field != null)) {
engine.field.reset();
} else if(propMap[playerID] != null) {
mapMaxNo[playerID] = propMap[playerID].getProperty("map.maxMapNumber", 0);
engine.createFieldIfNeeded();
loadMap(engine.field, propMap[playerID], id);
engine.field.setAllSkin(engine.getSkin());
}
}
/*
* Initialization for each player
*/
@Override
public void playerInit(GameEngine engine, int playerID) {
if(playerID == 1) {
engine.randSeed = owner.engine[0].randSeed;
engine.random = new Random(owner.engine[0].randSeed);
}
engine.framecolor = PLAYER_COLOR_FRAME[playerID];
engine.clearMode = GameEngine.CLEAR_COLOR;
engine.garbageColorClear = true;
engine.lineGravityType = GameEngine.LINE_GRAVITY_CASCADE;
for(int i = 0; i < Piece.PIECE_COUNT; i++)
engine.nextPieceEnable[i] = (PIECE_ENABLE[i] == 1);
engine.blockColors = BLOCK_COLORS;
engine.randomBlockColor = true;
engine.connectBlocks = false;
ojama[playerID] = 0;
ojamaSent[playerID] = 0;
score[playerID] = 0;
zenKeshi[playerID] = false;
scgettime[playerID] = 0;
feverPoints[playerID] = 0;
feverTime[playerID] = feverTimeMin[playerID] * 60;
inFever[playerID] = false;
feverBackupField[playerID] = null;
cleared[playerID] = false;
ojamaDrop[playerID] = false;
if(engine.owner.replayMode == false) {
loadOtherSetting(engine, engine.owner.modeConfig);
loadPreset(engine, engine.owner.modeConfig, -1 - playerID);
version = CURRENT_VERSION;
} else {
loadOtherSetting(engine, engine.owner.replayProp);
loadPreset(engine, engine.owner.replayProp, -1 - playerID);
version = owner.replayProp.getProperty("avalanchevs.version", 0);
}
}
/*
* Settings screen processing
*/
@Override
public boolean onSetting(GameEngine engine, int playerID) {
// Menu
if((engine.owner.replayMode == false) && (engine.statc[4] == 0)) {
// Up
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_UP)) {
engine.statc[2]--;
if(engine.statc[2] < 0) engine.statc[2] = 24;
engine.playSE("cursor");
}
// Down
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_DOWN)) {
engine.statc[2]++;
if(engine.statc[2] > 24) engine.statc[2] = 0;
engine.playSE("cursor");
}
// Change setting values
int change = 0;
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_LEFT)) change = -1;
if(engine.ctrl.isMenuRepeatKey(Controller.BUTTON_RIGHT)) change = 1;
if(change != 0) {
engine.playSE("change");
int m = 1;
if(engine.ctrl.isPress(Controller.BUTTON_E)) m = 100;
if(engine.ctrl.isPress(Controller.BUTTON_F)) m = 1000;
switch(engine.statc[2]) {
case 0:
engine.speed.gravity += change * m;
if(engine.speed.gravity < -1) engine.speed.gravity = 99999;
if(engine.speed.gravity > 99999) engine.speed.gravity = -1;
break;
case 1:
engine.speed.denominator += change * m;
if(engine.speed.denominator < -1) engine.speed.denominator = 99999;
if(engine.speed.denominator > 99999) engine.speed.denominator = -1;
break;
case 2:
engine.speed.are += change;
if(engine.speed.are < 0) engine.speed.are = 99;
if(engine.speed.are > 99) engine.speed.are = 0;
break;
case 3:
engine.speed.areLine += change;
if(engine.speed.areLine < 0) engine.speed.areLine = 99;
if(engine.speed.areLine > 99) engine.speed.areLine = 0;
break;
case 4:
engine.speed.lineDelay += change;
if(engine.speed.lineDelay < 0) engine.speed.lineDelay = 99;
if(engine.speed.lineDelay > 99) engine.speed.lineDelay = 0;
break;
case 5:
engine.speed.lockDelay += change;
if(engine.speed.lockDelay < 0) engine.speed.lockDelay = 99;
if(engine.speed.lockDelay > 99) engine.speed.lockDelay = 0;
break;
case 6:
engine.speed.das += change;
if(engine.speed.das < 0) engine.speed.das = 99;
if(engine.speed.das > 99) engine.speed.das = 0;
break;
case 7:
case 8:
presetNumber[playerID] += change;
if(presetNumber[playerID] < 0) presetNumber[playerID] = 99;
if(presetNumber[playerID] > 99) presetNumber[playerID] = 0;
break;
case 9:
ojamaCounterMode[playerID] += change;
if(ojamaCounterMode[playerID] < 0) ojamaCounterMode[playerID] = 2;
if(ojamaCounterMode[playerID] > 2) ojamaCounterMode[playerID] = 0;
break;
case 10:
maxAttack[playerID] += change;
if(maxAttack[playerID] < 0) maxAttack[playerID] = 99;
if(maxAttack[playerID] > 99) maxAttack[playerID] = 0;
break;
case 11:
numColors[playerID] += change;
if(numColors[playerID] < 3) numColors[playerID] = 5;
if(numColors[playerID] > 5) numColors[playerID] = 3;
break;
case 12:
rensaShibari[playerID] += change;
if(rensaShibari[playerID] < 1) rensaShibari[playerID] = 20;
if(rensaShibari[playerID] > 20) rensaShibari[playerID] = 1;
break;
case 13:
ojamaRate[playerID] += change*10;
if(ojamaRate[playerID] < 10) ojamaRate[playerID] = 1000;
if(ojamaRate[playerID] > 1000) ojamaRate[playerID] = 10;
break;
case 14:
big[playerID] = !big[playerID];
break;
case 15:
enableSE[playerID] = !enableSE[playerID];
break;
case 16:
hurryupSeconds[playerID] += change;
if(hurryupSeconds[playerID] < 0) hurryupSeconds[playerID] = 300;
if(hurryupSeconds[playerID] > 300) hurryupSeconds[playerID] = 0;
break;
case 17:
ojamaHard[playerID] += change;
if(ojamaHard[playerID] < 0) ojamaHard[playerID] = 9;
if(ojamaHard[playerID] > 9) ojamaHard[playerID] = 0;
break;
case 18:
bgmno += change;
if(bgmno < 0) bgmno = BGMStatus.BGM_COUNT - 1;
if(bgmno > BGMStatus.BGM_COUNT - 1) bgmno = 0;
break;
case 19:
useMap[playerID] = !useMap[playerID];
if(!useMap[playerID]) {
if(engine.field != null) engine.field.reset();
} else {
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
}
break;
case 20:
mapSet[playerID] += change;
if(mapSet[playerID] < 0) mapSet[playerID] = 99;
if(mapSet[playerID] > 99) mapSet[playerID] = 0;
if(useMap[playerID]) {
mapNumber[playerID] = -1;
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
}
break;
case 21:
if(useMap[playerID]) {
mapNumber[playerID] += change;
if(mapNumber[playerID] < -1) mapNumber[playerID] = mapMaxNo[playerID] - 1;
if(mapNumber[playerID] > mapMaxNo[playerID] - 1) mapNumber[playerID] = -1;
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
} else {
mapNumber[playerID] = -1;
}
break;
case 22:
feverThreshold[playerID] += change;
if(feverThreshold[playerID] < 0) feverThreshold[playerID] = 9;
if(feverThreshold[playerID] > 9) feverThreshold[playerID] = 0;
break;
case 23:
feverTimeMin[playerID] += change;
if(feverTimeMin[playerID] < 1) feverTimeMin[playerID] = feverTimeMax[playerID];
if(feverTimeMin[playerID] > feverTimeMax[playerID]) feverTimeMin[playerID] = 1;
break;
case 24:
feverTimeMax[playerID] += change;
if(feverTimeMax[playerID] < feverTimeMin[playerID]) feverTimeMax[playerID] = 99;
if(feverTimeMax[playerID] > 99) feverTimeMax[playerID] = feverTimeMin[playerID];
break;
}
}
// Confirm
if(engine.ctrl.isPush(Controller.BUTTON_A) && (engine.statc[3] >= 5)) {
engine.playSE("decide");
if(engine.statc[2] == 7) {
loadPreset(engine, owner.modeConfig, presetNumber[playerID]);
} else if(engine.statc[2] == 8) {
savePreset(engine, owner.modeConfig, presetNumber[playerID]);
receiver.saveModeConfig(owner.modeConfig);
} else {
saveOtherSetting(engine, owner.modeConfig);
savePreset(engine, owner.modeConfig, -1 - playerID);
receiver.saveModeConfig(owner.modeConfig);
engine.statc[4] = 1;
}
}
// Cancel
if(engine.ctrl.isPush(Controller.BUTTON_B)) {
engine.quitflag = true;
}
// Load map for preview
if(useMap[playerID] && (engine.statc[3] == 0)) {
loadMapPreview(engine, playerID, (mapNumber[playerID] < 0) ? 0 : mapNumber[playerID], true);
}
// Random map preview
if(useMap[playerID] && (propMap[playerID] != null) && (mapNumber[playerID] < 0)) {
if(engine.statc[3] % 30 == 0) {
engine.statc[5]++;
if(engine.statc[5] >= mapMaxNo[playerID]) engine.statc[5] = 0;
loadMapPreview(engine, playerID, engine.statc[5], false);
}
}
engine.statc[3]++;
} else if(engine.statc[4] == 0) {
engine.statc[3]++;
engine.statc[2] = 0;
if(engine.statc[3] >= 60) {
engine.statc[2] = 9;
}
if(engine.statc[3] >= 120) {
engine.statc[4] = 1;
}
} else {
// Start
if((owner.engine[0].statc[4] == 1) && (owner.engine[1].statc[4] == 1) && (playerID == 1)) {
owner.engine[0].stat = GameEngine.STAT_READY;
owner.engine[1].stat = GameEngine.STAT_READY;
owner.engine[0].resetStatc();
owner.engine[1].resetStatc();
}
// Cancel
else if(engine.ctrl.isPush(Controller.BUTTON_B)) {
engine.statc[4] = 0;
}
}
return true;
}
/*
* Rendering of the settings screen
*/
@Override
public void renderSetting(GameEngine engine, int playerID) {
if(engine.statc[4] == 0) {
if(engine.statc[2] < 9) {
if(owner.replayMode == false) {
receiver.drawMenuFont(engine, playerID, 0, (engine.statc[2] * 2) + 1, "b",
(playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE);
}
receiver.drawMenuFont(engine, playerID, 0, 0, "GRAVITY", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 1, String.valueOf(engine.speed.gravity), (engine.statc[2] == 0));
receiver.drawMenuFont(engine, playerID, 0, 2, "G-MAX", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 3, String.valueOf(engine.speed.denominator), (engine.statc[2] == 1));
receiver.drawMenuFont(engine, playerID, 0, 4, "ARE", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 5, String.valueOf(engine.speed.are), (engine.statc[2] == 2));
receiver.drawMenuFont(engine, playerID, 0, 6, "ARE LINE", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 7, String.valueOf(engine.speed.areLine), (engine.statc[2] == 3));
receiver.drawMenuFont(engine, playerID, 0, 8, "LINE DELAY", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 9, String.valueOf(engine.speed.lineDelay), (engine.statc[2] == 4));
receiver.drawMenuFont(engine, playerID, 0, 10, "LOCK DELAY", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 11, String.valueOf(engine.speed.lockDelay), (engine.statc[2] == 5));
receiver.drawMenuFont(engine, playerID, 0, 12, "DAS", EventReceiver.COLOR_ORANGE);
receiver.drawMenuFont(engine, playerID, 1, 13, String.valueOf(engine.speed.das), (engine.statc[2] == 6));
receiver.drawMenuFont(engine, playerID, 0, 14, "LOAD", EventReceiver.COLOR_GREEN);
receiver.drawMenuFont(engine, playerID, 1, 15, String.valueOf(presetNumber[playerID]), (engine.statc[2] == 7));
receiver.drawMenuFont(engine, playerID, 0, 16, "SAVE", EventReceiver.COLOR_GREEN);
receiver.drawMenuFont(engine, playerID, 1, 17, String.valueOf(presetNumber[playerID]), (engine.statc[2] == 8));
} else if(engine.statc[2] < 19) {
if(owner.replayMode == false) {
receiver.drawMenuFont(engine, playerID, 0, ((engine.statc[2] - 9) * 2) + 1, "b",
(playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE);
}
receiver.drawMenuFont(engine, playerID, 0, 0, "COUNTER", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 1, OJAMA_COUNTER_STRING[ojamaCounterMode[playerID]], (engine.statc[2] == 9));
receiver.drawMenuFont(engine, playerID, 0, 2, "MAX ATTACK", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 3, String.valueOf(maxAttack[playerID]), (engine.statc[2] == 10));
receiver.drawMenuFont(engine, playerID, 0, 4, "COLORS", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 5, String.valueOf(numColors[playerID]), (engine.statc[2] == 11));
receiver.drawMenuFont(engine, playerID, 0, 6, "MIN CHAIN", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 7, String.valueOf(rensaShibari[playerID]), (engine.statc[2] == 12));
receiver.drawMenuFont(engine, playerID, 0, 8, "OJAMA RATE", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 9, String.valueOf(ojamaRate[playerID]), (engine.statc[2] == 13));
receiver.drawMenuFont(engine, playerID, 0, 10, "BIG", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 11, GeneralUtil.getONorOFF(big[playerID]), (engine.statc[2] == 14));
receiver.drawMenuFont(engine, playerID, 0, 12, "SE", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 13, GeneralUtil.getONorOFF(enableSE[playerID]), (engine.statc[2] == 15));
receiver.drawMenuFont(engine, playerID, 0, 14, "HURRYUP", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 15, (hurryupSeconds[playerID] == 0) ? "NONE" : hurryupSeconds[playerID]+"SEC",
(engine.statc[2] == 16));
receiver.drawMenuFont(engine, playerID, 0, 16, "HARD OJAMA", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 17, String.valueOf(ojamaHard[playerID]), (engine.statc[2] == 17));
receiver.drawMenuFont(engine, playerID, 0, 18, "BGM", EventReceiver.COLOR_PINK);
receiver.drawMenuFont(engine, playerID, 1, 19, String.valueOf(bgmno), (engine.statc[2] == 18));
} else {
if(owner.replayMode == false) {
receiver.drawMenuFont(engine, playerID, 0, ((engine.statc[2] - 19) * 2) + 1, "b",
(playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE);
}
receiver.drawMenuFont(engine, playerID, 0, 0, "USE MAP", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 1, GeneralUtil.getONorOFF(useMap[playerID]), (engine.statc[2] == 19));
receiver.drawMenuFont(engine, playerID, 0, 2, "MAP SET", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 3, String.valueOf(mapSet[playerID]), (engine.statc[2] == 20));
receiver.drawMenuFont(engine, playerID, 0, 4, "MAP NO.", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 5, (mapNumber[playerID] < 0) ? "RANDOM" : mapNumber[playerID]+"/"+(mapMaxNo[playerID]-1),
(engine.statc[2] == 21));
receiver.drawMenuFont(engine, playerID, 0, 6, "FEVER", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 7, (feverThreshold[playerID] == 0) ? "NONE" : feverThreshold[playerID]+" PTS",
(engine.statc[2] == 22));
receiver.drawMenuFont(engine, playerID, 0, 8, "F-MIN TIME", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 9, feverTimeMin[playerID] + "SEC", (engine.statc[2] == 23));
receiver.drawMenuFont(engine, playerID, 0, 10, "F-MAX TIME", EventReceiver.COLOR_CYAN);
receiver.drawMenuFont(engine, playerID, 1, 11, feverTimeMax[playerID] + "SEC", (engine.statc[2] == 24));
}
} else {
receiver.drawMenuFont(engine, playerID, 3, 10, "WAIT", EventReceiver.COLOR_YELLOW);
}
}
/*
* Initialization during the Ready screen (before actual initialization)
*/
@Override
public boolean onReady(GameEngine engine, int playerID) {
if(engine.statc[0] == 0) {
engine.numColors = numColors[playerID];
feverTime[playerID] = feverTimeMin[playerID] * 60;
// Load the map and keep a backup for replay saving
if(useMap[playerID]) {
if(owner.replayMode) {
engine.createFieldIfNeeded();
loadMap(engine.field, owner.replayProp, playerID);
engine.field.setAllSkin(engine.getSkin());
} else {
if(propMap[playerID] == null) {
propMap[playerID] = receiver.loadProperties("config/map/vsbattle/" + mapSet[playerID] + ".map");
}
if(propMap[playerID] != null) {
engine.createFieldIfNeeded();
if(mapNumber[playerID] < 0) {
if((playerID == 1) && (useMap[0]) && (mapNumber[0] < 0)) {
engine.field.copy(owner.engine[0].field);
} else {
int no = (mapMaxNo[playerID] < 1) ? 0 : randMap.nextInt(mapMaxNo[playerID]);
loadMap(engine.field, propMap[playerID], no);
}
} else {
loadMap(engine.field, propMap[playerID], mapNumber[playerID]);
}
engine.field.setAllSkin(engine.getSkin());
fldBackup[playerID] = new Field(engine.field);
}
}
} else if(engine.field != null) {
engine.field.reset();
}
}
return false;
}
/*
* Processing when the game starts
*/
@Override
public void startGame(GameEngine engine, int playerID) {
engine.b2bEnable = false;
engine.comboType = GameEngine.COMBO_TYPE_DISABLE;
engine.big = big[playerID];
engine.enableSE = enableSE[playerID];
if(playerID == 1) owner.bgmStatus.bgm = bgmno;
engine.colorClearSize = big[playerID] ? 12 : 4;
engine.tspinAllowKick = false;
engine.tspinEnable = false;
engine.useAllSpinBonus = false;
}
/*
* Score display
*/
@Override
public void renderLast(GameEngine engine, int playerID) {
// Status display
if(playerID == 0) {
receiver.drawScoreFont(engine, playerID, -1, 0, "AVALANCHE VS", EventReceiver.COLOR_GREEN);
receiver.drawScoreFont(engine, playerID, -1, 2, "OJAMA", EventReceiver.COLOR_PURPLE);
String ojamaStr1P = String.valueOf(ojama[0]);
if (ojamaAdd[0] > 0 && !(inFever[0] && ojamaAddToFever[0]))
ojamaStr1P = ojamaStr1P + "(+" + String.valueOf(ojamaAdd[0]) + ")";
String ojamaStr2P = String.valueOf(ojama[1]);
if (ojamaAdd[1] > 0 && !(inFever[1] && ojamaAddToFever[1]))
ojamaStr2P = ojamaStr2P + "(+" + String.valueOf(ojamaAdd[1]) + ")";
receiver.drawScoreFont(engine, playerID, -1, 3, "1P:", EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, 3, 3, ojamaStr1P, (ojama[0] > 0));
receiver.drawScoreFont(engine, playerID, -1, 4, "2P:", EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, 3, 4, ojamaStr2P, (ojama[1] > 0));
receiver.drawScoreFont(engine, playerID, -1, 6, "ATTACK", EventReceiver.COLOR_GREEN);
receiver.drawScoreFont(engine, playerID, -1, 7, "1P: " + String.valueOf(ojamaSent[0]), EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, -1, 8, "2P: " + String.valueOf(ojamaSent[1]), EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, -1, 10, "SCORE", EventReceiver.COLOR_PURPLE);
receiver.drawScoreFont(engine, playerID, -1, 11, "1P: " + String.valueOf(score[0]), EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, -1, 12, "2P: " + String.valueOf(score[1]), EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, -1, 14, "TIME", EventReceiver.COLOR_GREEN);
receiver.drawScoreFont(engine, playerID, -1, 15, GeneralUtil.getTime(engine.statistics.time));
if (inFever[0] || inFever[1])
{
receiver.drawScoreFont(engine, playerID, -1, 17, "FEVER OJAMA", EventReceiver.COLOR_PURPLE);
String ojamaFeverStr1P = String.valueOf(ojamaFever[0]);
if (ojamaAdd[0] > 0 && inFever[0] && ojamaAddToFever[0])
ojamaFeverStr1P = ojamaFeverStr1P + "(+" + String.valueOf(ojamaAdd[0]) + ")";
String ojamaFeverStr2P = String.valueOf(ojamaFever[1]);
if (ojamaAdd[1] > 0 && inFever[1] && ojamaAddToFever[1])
ojamaFeverStr2P = ojamaFeverStr2P + "(+" + String.valueOf(ojamaAdd[1]) + ")";
receiver.drawScoreFont(engine, playerID, -1, 18, "1P:", EventReceiver.COLOR_RED);
receiver.drawScoreFont(engine, playerID, 3, 18, ojamaFeverStr1P, (ojamaFever[0] > 0));
receiver.drawScoreFont(engine, playerID, -1, 19, "2P:", EventReceiver.COLOR_BLUE);
receiver.drawScoreFont(engine, playerID, 3, 19, ojamaFeverStr2P, (ojamaFever[1] > 0));
}
}
if (!owner.engine[playerID].gameActive)
return;
int playerColor = (playerID == 0) ? EventReceiver.COLOR_RED : EventReceiver.COLOR_BLUE;
if (feverThreshold[playerID] > 0)
{
receiver.drawMenuFont(engine, playerID, 0, 17, "FEVER POINT", playerColor);
receiver.drawMenuFont(engine, playerID, 0, 18, feverPoints[playerID] + " / " + feverThreshold[playerID], inFever[playerID]);
receiver.drawMenuFont(engine, playerID, 0, 19, "FEVER TIME", playerColor);
receiver.drawMenuFont(engine, playerID, 0, 20, GeneralUtil.getTime(feverTime[playerID]), inFever[playerID]);
}
if(zenKeshi[playerID])
receiver.drawMenuFont(engine, playerID, 1, 21, "ZENKESHI!", EventReceiver.COLOR_YELLOW);
if (ojamaHard[playerID] > 0 && engine.field != null)
for (int x = 0; x < engine.field.getWidth(); x++)
for (int y = 0; y < engine.field.getHeight(); y++)
{
int hard = engine.field.getBlock(x, y).hard;
if (hard > 0)
receiver.drawMenuFont(engine, playerID, x, y, String.valueOf(hard), EventReceiver.COLOR_YELLOW);
}
}
/*
* Score calculation
*/
@Override
public void calcScore(GameEngine engine, int playerID, int avalanche) {
int enemyID = 0;
if(playerID == 0) enemyID = 1;
if (big[playerID])
avalanche >>= 2;
// Line clear bonus
int pts = avalanche*10;
int ojamaNew = 0;
if (avalanche > 0) {
cleared[playerID] = true;
if (zenKeshi[playerID])
ojamaNew += 30;
if (engine.field.isEmpty()) {
engine.playSE("bravo");
zenKeshi[playerID] = true;
engine.statistics.score += 2100;
}
else
zenKeshi[playerID] = false;
int chain = engine.chain;
engine.playSE("combo" + Math.min(chain, 20));
if (chain == 1)
ojamaAddToFever[enemyID] = inFever[enemyID];
int multiplier = engine.field.colorClearExtraCount;
if (big[playerID])
multiplier >>= 2;
if (engine.field.colorsCleared > 1)
multiplier += (engine.field.colorsCleared-1)*2;
/*
if (multiplier < 0)
multiplier = 0;
if (chain == 0)
firstExtra = avalanche > engine.colorClearSize;
*/
if (chain == 2)
multiplier += 8;
else if (chain == 3)
multiplier += 16;
else if (chain >= 4)
multiplier += 32*(chain-3);
/*
if (firstExtra)
multiplier++;
*/
if (multiplier > 999)
multiplier = 999;
if (multiplier < 1)
multiplier = 1;
lastscore[playerID] = pts;
lastmultiplier[playerID] = multiplier;
scgettime[playerID] = 120;
int ptsTotal = pts*multiplier;
score[playerID] += ptsTotal;
if (hurryupSeconds[playerID] > 0 && engine.statistics.time > hurryupSeconds[playerID])
ptsTotal <<= engine.statistics.time / (hurryupSeconds[playerID] * 60);
ojamaNew += (ptsTotal+ojamaRate[playerID]-1)/ojamaRate[playerID];
if (chain >= rensaShibari[playerID])
{
ojamaSent[playerID] += ojamaNew;
if (ojamaCounterMode[playerID] != OJAMA_COUNTER_OFF)
{
boolean countered = false;
if (inFever[playerID])
{
if (ojamaFever[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojamaFever[playerID], ojamaNew);
ojamaFever[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
if (ojamaAdd[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojamaAdd[playerID], ojamaNew);
ojamaAdd[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
}
if (ojama[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojama[playerID], ojamaNew);
ojama[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
if (ojamaAdd[playerID] > 0 && ojamaNew > 0)
{
int delta = Math.min(ojamaAdd[playerID], ojamaNew);
ojamaAdd[playerID] -= delta;
ojamaNew -= delta;
countered = true;
}
if (countered)
{
if (feverThreshold[playerID] > 0 && feverThreshold[playerID] > feverPoints[playerID])
feverPoints[playerID]++;
if (feverThreshold[enemyID] > 0)
feverTime[enemyID] = Math.min(feverTime[enemyID]+60,feverTimeMax[enemyID]*60);
}
}
if (ojamaNew > 0)
ojamaAdd[enemyID] += ojamaNew;
}
}
else if (!engine.field.canCascade())
cleared[playerID] = false;
}
public boolean lineClearEnd(GameEngine engine, int playerID) {
int enemyID = 0;
if(playerID == 0) enemyID = 1;
if (ojamaAdd[enemyID] > 0)
{
if (ojamaAddToFever[enemyID] && inFever[enemyID])
ojamaFever[enemyID] += ojamaAdd[enemyID];
else
ojama[enemyID] += ojamaAdd[enemyID];
ojamaAdd[enemyID] = 0;
}
checkFeverEnd(engine, playerID);
int ojamaNow = inFever[playerID] ? ojamaFever[playerID] : ojama[playerID];
if (ojamaNow > 0 && !ojamaDrop[playerID] &&
(!cleared[playerID] || ojamaCounterMode[playerID] != OJAMA_COUNTER_FEVER))
{
ojamaDrop[playerID] = true;
int drop = Math.min(ojamaNow, maxAttack[playerID]);
if (inFever[playerID])
ojamaFever[playerID] -= drop;
else
ojama[playerID] -= drop;
engine.field.garbageDrop(engine, drop, big[playerID], ojamaHard[playerID]);
return true;
}
checkFeverStart(engine, playerID);
return false;
}
private void checkFeverStart(GameEngine engine, int playerID)
{
if (!inFever[playerID] && feverPoints[playerID] == feverThreshold[playerID])
{
inFever[playerID] = true;
feverBackupField[playerID] = engine.field;
engine.field = null;
engine.createFieldIfNeeded();
//TODO: Add preset chain.
}
}
private void checkFeverEnd(GameEngine engine, int playerID)
{
if (inFever[playerID] && feverTime[playerID] == 0)
{
inFever[playerID] = false;
feverTime[playerID] = feverTimeMin[playerID] * 60;
feverPoints[playerID] = 0;
engine.field = feverBackupField[playerID];
ojama[playerID] += ojamaFever[playerID];
ojamaFever[playerID] = 0;
ojamaAddToFever[playerID] = false;
}
}
/*
* Processing at the end of each frame
*/
@Override
public void onLast(GameEngine engine, int playerID) {
scgettime[playerID]++;
if (inFever[playerID] && feverTime[playerID] > 0)
{
if (feverTime[playerID] == 1)
engine.playSE("levelstop");
feverTime[playerID]--;
}
if (engine.stat == GameEngine.STAT_MOVE)
ojamaDrop[playerID] = false;
int width = 1;
if (engine.field != null)
width = engine.field.getWidth();
int blockHeight = receiver.getBlockGraphicsHeight(engine, playerID);
// Rising garbage meter
if(ojama[playerID] * blockHeight / width > engine.meterValue) {
engine.meterValue++;
} else if(ojama[playerID] * blockHeight / width < engine.meterValue) {
engine.meterValue--;
}
if(ojama[playerID] >= 5*width) engine.meterColor = GameEngine.METER_COLOR_RED;
else if(ojama[playerID] >= width) engine.meterColor = GameEngine.METER_COLOR_ORANGE;
else if(ojama[playerID] >= 1) engine.meterColor = GameEngine.METER_COLOR_YELLOW;
else engine.meterColor = GameEngine.METER_COLOR_GREEN;
// Determine the winner
if((playerID == 1) && (owner.engine[0].gameActive)) {
if((owner.engine[0].stat == GameEngine.STAT_GAMEOVER) && (owner.engine[1].stat == GameEngine.STAT_GAMEOVER)) {
// Draw
winnerID = -1;
owner.engine[0].gameActive = false;
owner.engine[1].gameActive = false;
owner.bgmStatus.bgm = BGMStatus.BGM_NOTHING;
} else if((owner.engine[0].stat != GameEngine.STAT_GAMEOVER) && (owner.engine[1].stat == GameEngine.STAT_GAMEOVER)) {
// 1P wins
winnerID = 0;
owner.engine[0].gameActive = false;
owner.engine[1].gameActive = false;
owner.engine[0].stat = GameEngine.STAT_EXCELLENT;
owner.engine[0].resetStatc();
owner.engine[0].statc[1] = 1;
owner.bgmStatus.bgm = BGMStatus.BGM_NOTHING;
} else if((owner.engine[0].stat == GameEngine.STAT_GAMEOVER) && (owner.engine[1].stat != GameEngine.STAT_GAMEOVER)) {
// 2P wins
winnerID = 1;
owner.engine[0].gameActive = false;
owner.engine[1].gameActive = false;
owner.engine[1].stat = GameEngine.STAT_EXCELLENT;
owner.engine[1].resetStatc();
owner.engine[1].statc[1] = 1;
owner.bgmStatus.bgm = BGMStatus.BGM_NOTHING;
}
}
}
/*
* Rendering of the results screen
*/
@Override
public void renderResult(GameEngine engine, int playerID) {
receiver.drawMenuFont(engine, playerID, 0, 1, "RESULT", EventReceiver.COLOR_ORANGE);
if(winnerID == -1) {
receiver.drawMenuFont(engine, playerID, 6, 2, "DRAW", EventReceiver.COLOR_GREEN);
} else if(winnerID == playerID) {
receiver.drawMenuFont(engine, playerID, 6, 2, "WIN!", EventReceiver.COLOR_YELLOW);
} else {
receiver.drawMenuFont(engine, playerID, 6, 2, "LOSE", EventReceiver.COLOR_WHITE);
}
receiver.drawMenuFont(engine, playerID, 0, 3, "ATTACK", EventReceiver.COLOR_ORANGE);
String strScore = String.format("%10d", ojamaSent[playerID]);
receiver.drawMenuFont(engine, playerID, 0, 4, strScore);
receiver.drawMenuFont(engine, playerID, 0, 5, "LINE", EventReceiver.COLOR_ORANGE);
String strLines = String.format("%10d", engine.statistics.lines);
receiver.drawMenuFont(engine, playerID, 0, 6, strLines);
receiver.drawMenuFont(engine, playerID, 0, 7, "PIECE", EventReceiver.COLOR_ORANGE);
String strPiece = String.format("%10d", engine.statistics.totalPieceLocked);
receiver.drawMenuFont(engine, playerID, 0, 8, strPiece);
receiver.drawMenuFont(engine, playerID, 0, 9, "ATTACK/MIN", EventReceiver.COLOR_ORANGE);
float apm = (float)(ojamaSent[playerID] * 3600) / (float)(engine.statistics.time);
String strAPM = String.format("%10g", apm);
receiver.drawMenuFont(engine, playerID, 0, 10, strAPM);
receiver.drawMenuFont(engine, playerID, 0, 11, "LINE/MIN", EventReceiver.COLOR_ORANGE);
String strLPM = String.format("%10g", engine.statistics.lpm);
receiver.drawMenuFont(engine, playerID, 0, 12, strLPM);
receiver.drawMenuFont(engine, playerID, 0, 13, "PIECE/SEC", EventReceiver.COLOR_ORANGE);
String strPPS = String.format("%10g", engine.statistics.pps);
receiver.drawMenuFont(engine, playerID, 0, 14, strPPS);
receiver.drawMenuFont(engine, playerID, 0, 15, "TIME", EventReceiver.COLOR_ORANGE);
String strTime = String.format("%10s", GeneralUtil.getTime(owner.engine[0].statistics.time));
receiver.drawMenuFont(engine, playerID, 0, 16, strTime);
}
/*
* Processing when a replay is saved
*/
@Override
public void saveReplay(GameEngine engine, int playerID, CustomProperties prop) {
saveOtherSetting(engine, owner.replayProp);
savePreset(engine, owner.replayProp, -1 - playerID);
if(useMap[playerID] && (fldBackup[playerID] != null)) {
saveMap(fldBackup[playerID], owner.replayProp, playerID);
}
owner.replayProp.setProperty("avalanchevs.version", version);
}
}
|
Fix bug in Fever support
|
src/org/game_host/hebo/nullpomino/game/subsystem/mode/AvalancheVSMode.java
|
Fix bug in Fever support
|
|
Java
|
bsd-3-clause
|
66caa978560d5af4066d45090fc07753476ecdd2
| 0
|
UoLCompSoc/ld32,UoLCompSoc/ld32,UoLCompSoc/ld32
|
package uk.org.ulcompsoc.ld32.systems;
import uk.org.ulcompsoc.ld32.components.Wallet;
import uk.org.ulcompsoc.ld32.util.Mappers;
import uk.org.ulcompsoc.ld32.util.TextureManager;
import uk.org.ulcompsoc.ld32.util.TextureName;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.Family;
import com.badlogic.ashley.systems.IteratingSystem;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Vector3;
public class GUIRenderSystem extends IteratingSystem {
final Batch batch;
final TextureManager textureManager;
private OrthographicCamera camera;
private TextureRegion frame = null;
private final Vector3 DFLT_POSITION_OF_THE_FRAME = new Vector3(0.0f, 0.0f, 0.0f);
private Vector3 temp;
@SuppressWarnings("unchecked")
public GUIRenderSystem(final Batch batch, final TextureManager textureManager, final OrthographicCamera cam,
int priority) {
super(Family.all(Wallet.class).get(), priority);
this.batch = batch;
this.textureManager = textureManager;
this.camera = cam;
this.frame = new TextureRegion(textureManager.nameMap.get(TextureName.FRAME_1));
// TextureRegion(textureManager.nameMap.get(TextureName.));
}
@Override
protected void processEntity(Entity entity, float deltaTime) {
Wallet wallet = Mappers.walletMapper.get(entity);
camera.update();
temp = camera.unproject(DFLT_POSITION_OF_THE_FRAME.cpy());
batch.setProjectionMatrix(camera.combined);
int redcount = wallet.red;
int bluecount = wallet.blue;
int greencount = wallet.green;
batch.begin();
batch.draw(frame, temp.x, temp.y - frame.getRegionHeight());
batch.end();
// batch.draw(textureManager., x, y, originX, originY, width, height,
// scaleX, scaleY, rotation);
}
}
|
core/src/uk/org/ulcompsoc/ld32/systems/GUIRenderSystem.java
|
package uk.org.ulcompsoc.ld32.systems;
import uk.org.ulcompsoc.ld32.components.Wallet;
import uk.org.ulcompsoc.ld32.util.Mappers;
import uk.org.ulcompsoc.ld32.util.TextureManager;
import uk.org.ulcompsoc.ld32.util.TextureName;
import com.badlogic.ashley.core.Entity;
import com.badlogic.ashley.core.Family;
import com.badlogic.ashley.systems.IteratingSystem;
import com.badlogic.gdx.graphics.OrthographicCamera;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.math.Vector3;
public class GUIRenderSystem extends IteratingSystem {
final Batch batch;
final TextureManager textureManager;
private OrthographicCamera camera;
private TextureRegion frame = null;
private final Vector3 DFLT_POSITION_OF_THE_FRAME = new Vector3(0.0f, 0.0f, 0.0f);
private Vector3 temp;
@SuppressWarnings("unchecked")
public GUIRenderSystem(final Batch batch, final TextureManager textureManager, final OrthographicCamera cam,
int priority) {
super(Family.all(Wallet.class).get(), priority);
this.batch = batch;
this.textureManager = textureManager;
this.camera = cam;
this.frame = new TextureRegion(textureManager.nameMap.get(TextureName.FRAME_1));
// TextureRegion(textureManager.nameMap.get(TextureName.));
}
@Override
protected void processEntity(Entity entity, float deltaTime) {
Wallet wallet = Mappers.walletMapper.get(entity);
camera.update();
temp = camera.unproject(DFLT_POSITION_OF_THE_FRAME.cpy());
batch.setProjectionMatrix(camera.combined);
int redcount = wallet.red;
int bluecount = wallet.blue;
int greencount = wallet.green;
batch.begin();
batch.draw(frame, temp.x, temp.y+frame.getRegionHeight());
batch.end();
// batch.draw(textureManager., x, y, originX, originY, width, height,
// scaleX, scaleY, rotation);
}
}
|
Fix guirendersystem
|
core/src/uk/org/ulcompsoc/ld32/systems/GUIRenderSystem.java
|
Fix guirendersystem
|
|
Java
|
bsd-3-clause
|
3cbc82b2ee51597080c07b21bcf2cc6656fadf8f
| 0
|
oci-pronghorn/GreenLightning,oci-pronghorn/GreenLightning,oci-pronghorn/GreenLightning
|
package com.ociweb.gl.impl;
import java.io.IOException;
import com.ociweb.gl.api.FieldReader;
import com.ociweb.pronghorn.pipe.DataInputBlobReader;
import com.ociweb.pronghorn.pipe.DataOutputBlobWriter;
import com.ociweb.pronghorn.pipe.MessageSchema;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.util.Appendables;
import com.ociweb.pronghorn.util.TrieParser;
import com.ociweb.pronghorn.util.TrieParserReader;
import com.ociweb.pronghorn.util.math.Decimal;
public class PayloadReader<S extends MessageSchema<S>> extends DataInputBlobReader<S> implements FieldReader{
private TrieParser extractionParser;
private TrieParserReader reader = new TrieParserReader(true);
private int limit = -1;
public PayloadReader(Pipe<S> pipe) {
super(pipe);
}
private static <S extends MessageSchema<S>> void checkLimit(PayloadReader<S> that, int min) {
if ( (that.position+min) > that.limit ) {
throw new RuntimeException("Read attempted beyond the end of the field data");
}
}
@Override
public int openHighLevelAPIField(int loc) {
int len = super.openHighLevelAPIField(loc);
limit = len + position;
return len;
}
@Override
public int openLowLevelAPIField() {
int len = super.openLowLevelAPIField();
limit = len + position;
return len;
}
public void setFieldNameParser(TrieParser extractionParser) {
this.extractionParser = extractionParser;
}
private int fieldIdx(long fieldId) {
return (int)fieldId & 0xFFFF;
}
private int fieldType(long fieldId) {
return (((int)fieldId)>>16) & 0xFF;
}
private int computePosition(long fieldId) {
assert(fieldId>=0) : "check field name, it does not match any found field";
//jump to end and index backwards to find data position
return readFromEndLastInt(fieldIdx(fieldId));
}
private int computePositionSecond(long fieldId) {
assert(fieldId>=0) : "check field name, it does not match any found field";
//jump to end and index backwards to find data position
return readFromEndLastInt(1+fieldIdx(fieldId));
}
public long getFieldId(byte[] fieldName) {
return reader.query(reader, extractionParser, fieldName, 0, fieldName.length, Integer.MAX_VALUE);
}
public long getLong(byte[] fieldName) {
return getLong(getFieldId(fieldName));
}
public int getInt(byte[] fieldName) {
return (int)getLong(getFieldId(fieldName));
}
public int getInt(int fieldName) {
return (int)getLong(fieldName);
}
public short getShort(byte[] fieldName) {
return (short)getLong(getFieldId(fieldName));
}
public short getShort(int fieldName) {
return (short)getLong(fieldName);
}
public byte getByte(byte[] fieldName) {
return (byte)getLong(getFieldId(fieldName));
}
public byte getByte(int fieldName) {
return (byte)getLong(fieldName);
}
@SuppressWarnings("unchecked")
public long getLong(long fieldId) {
position(computePosition(fieldId));
checkLimit(this,1);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return DataInputBlobReader.readUTFAsLong(this);
} else if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
long numerator = DataInputBlobReader.readPackedLong(this);
long denominator = DataInputBlobReader.readPackedLong(this);
return numerator/denominator;
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
return readDecimalAsLong();
}
throw new UnsupportedOperationException("unknown type "+type);
}
public long getLongDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_SIGNED_INT == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public double getDoubleDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_DECIMAL == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,2);
return Decimal.asDouble(readPackedLong(this), readByte());
}
public <A extends Appendable> A getTextDirect(long fieldId, A appendable) {
assert(TrieParser.ESCAPE_CMD_BYTES == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,2);
readUTF(appendable);
return appendable;
}
public long getRationalNumeratorDirect(byte[] fieldName) {
return getRationalNumeratorDirect(getFieldId(fieldName));
}
public long getRationalNumeratorDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_RATIONAL == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public long getRationalDenominatorDirect(byte[] fieldName) {
return getRationalDenominatorDirect(getFieldId(fieldName));
}
public long getRationalDenominatorDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_RATIONAL == fieldType(fieldId));
position(computePositionSecond(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public long getDecimalMantissaDirect(byte[] fieldName) {
return getDecimalMantissaDirect(getFieldId(fieldName));
}
public long getDecimalMantissaDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_DECIMAL == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public byte getDecimalExponentDirect(byte[] fieldName) {
return (byte)getDecimalExponentDirect(getFieldId(fieldName));
}
public byte getDecimalExponentDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_DECIMAL == fieldType(fieldId));
position(computePositionSecond(fieldId));
checkLimit(this,1);
return readByte();
}
public double getDouble(byte[] fieldName) {
return getDouble(getFieldId(fieldName));
}
@SuppressWarnings("unchecked")
public double getDouble(long fieldId) {
position(computePosition(fieldId));
checkLimit(this,1);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
return readDecimalAsDouble();
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return (double)DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return DataInputBlobReader.readUTFAsDecimal(this);
} else if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
double numerator = DataInputBlobReader.readPackedLong(this);
double denominator = DataInputBlobReader.readPackedLong(this);
return numerator/denominator;
}
throw new UnsupportedOperationException("unknown type "+type+" field "+Long.toHexString(fieldId));
}
public long getRationalNumerator(byte[] fieldName) {
return getRationalNumerator(getFieldId(fieldName));
}
@SuppressWarnings("unchecked")
public long getRationalNumerator(long fieldId) {
position(computePosition(fieldId));
checkLimit(this,1);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
long m = readPackedLong();
byte e = readByte();
return e<0 ? m : Decimal.asLong(m, e);
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return DataInputBlobReader.readUTFAsLong(this);
}
throw new UnsupportedOperationException("unknown type "+type);
}
public long getRationalDenominator(byte[] fieldName) {
return getRationalDenominator(getFieldId(fieldName));
}
@SuppressWarnings("unchecked")
public long getRationalDenominator(long fieldId) {
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
position(computePositionSecond(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
position(computePosition(fieldId));
checkLimit(this,1);
DataInputBlobReader.readPackedLong(this);
byte e = readByte();
return e<0 ? (long)(1d/Decimal.powdi[64 - e]) : 1;
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return 1;
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return 1;
}
throw new UnsupportedOperationException("unknown type "+type);
}
public <A extends Appendable> A getText(byte[] fieldName, A appendable) {
return getText(getFieldId(fieldName),appendable);
}
@SuppressWarnings("unchecked")
public <A extends Appendable> A getText(long fieldId, A appendable) {
position(computePosition(fieldId));
checkLimit(this,2);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_BYTES) {
readUTF(appendable);
return appendable;
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
Appendables.appendValue(appendable, readPackedLong());
return appendable;
} else if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
long numerator = DataInputBlobReader.readPackedLong(this);
long denominator = DataInputBlobReader.readPackedLong(this);
Appendables.appendValue(Appendables.appendValue(appendable, numerator),"/",denominator);
return appendable;
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
long m = DataInputBlobReader.readPackedLong(this);
byte e = readByte();
Appendables.appendDecimalValue(appendable, m, e);
return appendable;
}
throw new UnsupportedOperationException("unknown type "+type);
}
@Override
public boolean isEqual(byte[] fieldName, byte[] equalText) {
return isEqual(getFieldId(fieldName),equalText);
}
@Override
public boolean isEqual(long fieldId, byte[] equalText) {
position(computePosition(fieldId));
checkLimit(this,2);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_BYTES) {
return equalUTF(equalText);
}
throw new UnsupportedOperationException("unsupported type "+type);
}
@Override
public long trieText(byte[] fieldName, TrieParserReader reader, TrieParser trie) {
return trieText(getFieldId(fieldName),reader,trie);
}
@Override
public long trieText(long fieldId, TrieParserReader reader, TrieParser trie) {
position(computePosition(fieldId));
checkLimit(this,2);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_BYTES) {
return parseUTF(reader, trie);
}
throw new UnsupportedOperationException("unsupported type "+type);
}
/////////////////////
@Override
public int read(byte[] b) {
checkLimit(this,2);
return super.read(b);
}
@Override
public int read(byte[] b, int off, int len) {
checkLimit(this,2);//not len because read will read less
return super.read(b, off, len);
}
@Override
public void readFully(byte[] b) {
checkLimit(this,2);
super.readFully(b);
}
@Override
public void readFully(byte[] b, int off, int len) {
checkLimit(this,2);//not len because read will read less
super.readFully(b, off, len);
}
@Override
public int skipBytes(int n) {
checkLimit(this,n);
return super.skipBytes(n);
}
@Override
public boolean readBoolean() {
checkLimit(this,1);
return super.readBoolean();
}
@Override
public byte readByte() {
checkLimit(this,1);
return super.readByte();
}
@Override
public int readUnsignedByte() {
checkLimit(this,1);
return super.readUnsignedByte();
}
@Override
public short readShort() {
checkLimit(this,2);
return super.readShort();
}
@Override
public int readUnsignedShort() {
checkLimit(this,2);
return super.readUnsignedShort();
}
@Override
public char readChar() {
checkLimit(this,1);
return super.readChar();
}
@Override
public int readInt() {
checkLimit(this,4);
return super.readInt();
}
@Override
public long readLong() {
checkLimit(this,8);
return super.readLong();
}
@Override
public float readFloat() {
checkLimit(this,4);
return super.readFloat();
}
@Override
public double readDouble() {
checkLimit(this,8);
return super.readDouble();
}
@Override
public int read() {
checkLimit(this,1);
return super.read();
}
@Override
public String readLine() {
checkLimit(this,1);
return super.readLine();
}
@Override
public String readUTF() {
checkLimit(this,2);
return super.readUTF();
}
@Override
public <A extends Appendable> A readUTF(A target) {
checkLimit(this,2);
return super.readUTF(target);
}
@Override
public Object readObject() {
checkLimit(this,1);
return super.readObject();
}
@Override
public <T extends MessageSchema<T>> void readInto(DataOutputBlobWriter<T> writer, int length) {
checkLimit(this,length);
super.readInto(writer, length);
}
@Override
public <A extends Appendable> A readPackedChars(A target) throws IOException {
checkLimit(this,1);
return super.readPackedChars(target);
}
@Override
public long readPackedLong() {
checkLimit(this,1);
return super.readPackedLong();
}
@Override
public int readPackedInt() {
checkLimit(this,1);
return super.readPackedInt();
}
@Override
public double readDecimalAsDouble() {
checkLimit(this,2);
return super.readDecimalAsDouble();
}
@Override
public long readDecimalAsLong() {
checkLimit(this,2);
return super.readDecimalAsLong();
}
@Override
public short readPackedShort() {
checkLimit(this,1);
return super.readPackedShort();
}
}
|
src/main/java/com/ociweb/gl/impl/PayloadReader.java
|
package com.ociweb.gl.impl;
import java.io.IOException;
import com.ociweb.gl.api.FieldReader;
import com.ociweb.pronghorn.pipe.DataInputBlobReader;
import com.ociweb.pronghorn.pipe.DataOutputBlobWriter;
import com.ociweb.pronghorn.pipe.MessageSchema;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.util.Appendables;
import com.ociweb.pronghorn.util.TrieParser;
import com.ociweb.pronghorn.util.TrieParserReader;
import com.ociweb.pronghorn.util.math.Decimal;
public class PayloadReader<S extends MessageSchema<S>> extends DataInputBlobReader<S> implements FieldReader{
private TrieParser extractionParser;
private TrieParserReader reader = new TrieParserReader(true);
private int limit = -1;
public PayloadReader(Pipe<S> pipe) {
super(pipe);
}
private static <S extends MessageSchema<S>> void checkLimit(PayloadReader<S> that, int min) {
if ( (that.position+min) > that.limit ) {
throw new RuntimeException("Read attempted beyond the end of the field data");
}
}
@Override
public int openHighLevelAPIField(int loc) {
int len = super.openHighLevelAPIField(loc);
limit = len + position;
return len;
}
@Override
public int openLowLevelAPIField() {
int len = super.openLowLevelAPIField();
limit = len + position;
return len;
}
public void setFieldNameParser(TrieParser extractionParser) {
this.extractionParser = extractionParser;
}
private int fieldIdx(long fieldId) {
return (int)fieldId & 0xFFFF;
}
private int fieldType(long fieldId) {
return (((int)fieldId)>>16) & 0xFF;
}
private int computePosition(long fieldId) {
assert(fieldId>=0) : "check field name, it does not match any found field";
//jump to end and index backwards to find data position
return readFromEndLastInt(fieldIdx(fieldId));
}
private int computePositionSecond(long fieldId) {
assert(fieldId>=0) : "check field name, it does not match any found field";
//jump to end and index backwards to find data position
return readFromEndLastInt(1+fieldIdx(fieldId));
}
public long getFieldId(byte[] fieldName) {
return reader.query(reader, extractionParser, fieldName, 0, fieldName.length, Integer.MAX_VALUE);
}
public long getLong(byte[] fieldName) {
return getLong(getFieldId(fieldName));
}
public int getInt(byte[] fieldName) {
return (int)getLong(getFieldId(fieldName));
}
public int getInt(int fieldName) {
return (int)getLong(fieldName);
}
public short getShort(byte[] fieldName) {
return (short)getLong(getFieldId(fieldName));
}
public short getShort(int fieldName) {
return (short)getLong(fieldName);
}
public byte getByte(byte[] fieldName) {
return (byte)getLong(getFieldId(fieldName));
}
public byte getByte(int fieldName) {
return (byte)getLong(fieldName);
}
@SuppressWarnings("unchecked")
public long getLong(long fieldId) {
position(computePosition(fieldId));
checkLimit(this,1);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return DataInputBlobReader.readUTFAsLong(this);
} else if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
long numerator = DataInputBlobReader.readPackedLong(this);
long denominator = DataInputBlobReader.readPackedLong(this);
return numerator/denominator;
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
return readDecimalAsLong();
}
throw new UnsupportedOperationException("unknown type "+type);
}
public long getLongDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_SIGNED_INT == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public double getDoubleDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_DECIMAL == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,2);
return Decimal.asDouble(readPackedLong(this), readByte());
}
public <A extends Appendable> A getTextDirect(long fieldId, A appendable) {
assert(TrieParser.ESCAPE_CMD_BYTES == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,2);
readUTF(appendable);
return appendable;
}
public long getRationalNumeratorDirect(byte[] fieldName) {
return getRationalNumeratorDirect(getFieldId(fieldName));
}
public long getRationalNumeratorDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_RATIONAL == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public long getRationalDenominatorDirect(byte[] fieldName) {
return getRationalDenominatorDirect(getFieldId(fieldName));
}
public long getRationalDenominatorDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_RATIONAL == fieldType(fieldId));
position(computePositionSecond(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public long getDecimalMantissaDirect(byte[] fieldName) {
return getDecimalMantissaDirect(getFieldId(fieldName));
}
public long getDecimalMantissaDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_DECIMAL == fieldType(fieldId));
position(computePosition(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
}
public byte getDecimalExponentDirect(byte[] fieldName) {
return (byte)getDecimalExponentDirect(getFieldId(fieldName));
}
public byte getDecimalExponentDirect(long fieldId) {
assert(TrieParser.ESCAPE_CMD_DECIMAL == fieldType(fieldId));
position(computePositionSecond(fieldId));
checkLimit(this,1);
return readByte();
}
public double getDouble(byte[] fieldName) {
return getDouble(getFieldId(fieldName));
}
@SuppressWarnings("unchecked")
public double getDouble(long fieldId) {
position(computePosition(fieldId));
checkLimit(this,1);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
return readDecimalAsDouble();
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return (double)DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return DataInputBlobReader.readUTFAsDecimal(this);
} else if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
double numerator = DataInputBlobReader.readPackedLong(this);
double denominator = DataInputBlobReader.readPackedLong(this);
return numerator/denominator;
}
throw new UnsupportedOperationException("unknown type "+type+" field "+Long.toHexString(fieldId));
}
public long getRationalNumerator(byte[] fieldName) {
return getRationalNumerator(getFieldId(fieldName));
}
@SuppressWarnings("unchecked")
public long getRationalNumerator(long fieldId) {
position(computePosition(fieldId));
checkLimit(this,1);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
long m = readPackedLong();
byte e = readByte();
return e<0 ? m : Decimal.asLong(m, e);
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return DataInputBlobReader.readUTFAsLong(this);
}
throw new UnsupportedOperationException("unknown type "+type);
}
public long getRationalDenominator(byte[] fieldName) {
return getRationalDenominator(getFieldId(fieldName));
}
@SuppressWarnings("unchecked")
public long getRationalDenominator(long fieldId) {
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
position(computePositionSecond(fieldId));
checkLimit(this,1);
return DataInputBlobReader.readPackedLong(this);
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
position(computePosition(fieldId));
checkLimit(this,1);
DataInputBlobReader.readPackedLong(this);
byte e = readByte();
return e<0 ? (long)(1d/Decimal.powdi[64 - e]) : 1;
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
return 1;
} else if (type == TrieParser.ESCAPE_CMD_BYTES) {
return 1;
}
throw new UnsupportedOperationException("unknown type "+type);
}
public <A extends Appendable> A getText(byte[] fieldName, A appendable) {
return getText(getFieldId(fieldName),appendable);
}
@SuppressWarnings("unchecked")
public <A extends Appendable> A getText(long fieldId, A appendable) {
position(computePosition(fieldId));
checkLimit(this,2);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_BYTES) {
readUTF(appendable);
return appendable;
} else if (type == TrieParser.ESCAPE_CMD_SIGNED_INT) {
Appendables.appendValue(appendable, readLong());
return appendable;
} else if (type == TrieParser.ESCAPE_CMD_RATIONAL) {
long numerator = DataInputBlobReader.readPackedLong(this);
long denominator = DataInputBlobReader.readPackedLong(this);
Appendables.appendValue(Appendables.appendValue(appendable, numerator),"/",denominator);
return appendable;
} else if (type == TrieParser.ESCAPE_CMD_DECIMAL) {
long m = DataInputBlobReader.readPackedLong(this);
byte e = readByte();
Appendables.appendDecimalValue(appendable, m, e);
return appendable;
}
throw new UnsupportedOperationException("unknown type "+type);
}
@Override
public boolean isEqual(byte[] fieldName, byte[] equalText) {
return isEqual(getFieldId(fieldName),equalText);
}
@Override
public boolean isEqual(long fieldId, byte[] equalText) {
position(computePosition(fieldId));
checkLimit(this,2);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_BYTES) {
return equalUTF(equalText);
}
throw new UnsupportedOperationException("unsupported type "+type);
}
@Override
public long trieText(byte[] fieldName, TrieParserReader reader, TrieParser trie) {
return trieText(getFieldId(fieldName),reader,trie);
}
@Override
public long trieText(long fieldId, TrieParserReader reader, TrieParser trie) {
position(computePosition(fieldId));
checkLimit(this,2);
int type = fieldType(fieldId);
if (type == TrieParser.ESCAPE_CMD_BYTES) {
return parseUTF(reader, trie);
}
throw new UnsupportedOperationException("unsupported type "+type);
}
/////////////////////
@Override
public int read(byte[] b) {
checkLimit(this,2);
return super.read(b);
}
@Override
public int read(byte[] b, int off, int len) {
checkLimit(this,2);//not len because read will read less
return super.read(b, off, len);
}
@Override
public void readFully(byte[] b) {
checkLimit(this,2);
super.readFully(b);
}
@Override
public void readFully(byte[] b, int off, int len) {
checkLimit(this,2);//not len because read will read less
super.readFully(b, off, len);
}
@Override
public int skipBytes(int n) {
checkLimit(this,n);
return super.skipBytes(n);
}
@Override
public boolean readBoolean() {
checkLimit(this,1);
return super.readBoolean();
}
@Override
public byte readByte() {
checkLimit(this,1);
return super.readByte();
}
@Override
public int readUnsignedByte() {
checkLimit(this,1);
return super.readUnsignedByte();
}
@Override
public short readShort() {
checkLimit(this,2);
return super.readShort();
}
@Override
public int readUnsignedShort() {
checkLimit(this,2);
return super.readUnsignedShort();
}
@Override
public char readChar() {
checkLimit(this,1);
return super.readChar();
}
@Override
public int readInt() {
checkLimit(this,4);
return super.readInt();
}
@Override
public long readLong() {
checkLimit(this,8);
return super.readLong();
}
@Override
public float readFloat() {
checkLimit(this,4);
return super.readFloat();
}
@Override
public double readDouble() {
checkLimit(this,8);
return super.readDouble();
}
@Override
public int read() {
checkLimit(this,1);
return super.read();
}
@Override
public String readLine() {
checkLimit(this,1);
return super.readLine();
}
@Override
public String readUTF() {
checkLimit(this,2);
return super.readUTF();
}
@Override
public <A extends Appendable> A readUTF(A target) {
checkLimit(this,2);
return super.readUTF(target);
}
@Override
public Object readObject() {
checkLimit(this,1);
return super.readObject();
}
@Override
public <T extends MessageSchema<T>> void readInto(DataOutputBlobWriter<T> writer, int length) {
checkLimit(this,length);
super.readInto(writer, length);
}
@Override
public <A extends Appendable> A readPackedChars(A target) throws IOException {
checkLimit(this,1);
return super.readPackedChars(target);
}
@Override
public long readPackedLong() {
checkLimit(this,1);
return super.readPackedLong();
}
@Override
public int readPackedInt() {
checkLimit(this,1);
return super.readPackedInt();
}
@Override
public double readDecimalAsDouble() {
checkLimit(this,2);
return super.readDecimalAsDouble();
}
@Override
public long readDecimalAsLong() {
checkLimit(this,2);
return super.readDecimalAsLong();
}
@Override
public short readPackedShort() {
checkLimit(this,1);
return super.readPackedShort();
}
}
|
should be packedLong for holding int values
|
src/main/java/com/ociweb/gl/impl/PayloadReader.java
|
should be packedLong for holding int values
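The note about packed longs boils down to this: a value serialized in a variable-length, packed form occupies only as many bytes as it needs, so decoding it with a fixed-width readLong() both returns the wrong number and desynchronizes the stream for every field that follows. The self-contained sketch below illustrates the general idea with a hypothetical base-128 varint codec (the class and method names are made up for illustration); it is not pronghorn's actual packed-long format.
import java.io.ByteArrayOutputStream;
// Illustrative sketch only: a toy base-128 varint ("packed long") codec.
// It is NOT pronghorn's wire format; it just demonstrates why a value written
// in packed form must be read back with a packed decoder rather than a fixed
// 8-byte readLong().
public class PackedLongSketch {
    // Emit the value 7 bits at a time, least-significant group first;
    // the high bit of each byte flags that more bytes follow.
    static void writePacked(ByteArrayOutputStream out, long value) {
        while ((value & ~0x7FL) != 0) {
            out.write((int) ((value & 0x7F) | 0x80));
            value >>>= 7;
        }
        out.write((int) value);
    }
    // Decode starting at pos[0], advancing pos[0] past the consumed bytes.
    static long readPacked(byte[] data, int[] pos) {
        long result = 0;
        int shift = 0;
        while (true) {
            byte b = data[pos[0]++];
            result |= (long) (b & 0x7F) << shift;
            if ((b & 0x80) == 0) {
                return result;
            }
            shift += 7;
        }
    }
    public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        writePacked(out, 300); // an int-sized value
        byte[] bytes = out.toByteArray();
        // 300 occupies 2 packed bytes instead of the 8 a fixed-width long uses,
        // so a fixed 8-byte read here would also swallow bytes of the next field.
        System.out.println("packed size = " + bytes.length); // prints 2
        int[] pos = { 0 };
        System.out.println("decoded     = " + readPacked(bytes, pos)); // prints 300
    }
}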
|
|
Java
|
mit
|
f8fa83cd888d6b2b7c5cd72b8b15263375813f4f
| 0
|
phenoscape/PhenoscapeDataLoader
|
package org.phenoscape.obd.loader;
import java.util.Arrays;
import java.util.List;
public class Vocab {
public static final String PUB_HAS_DATE = "dc:date";
public static final String PUB_HAS_ABSTRACT = "dc:abstract";
public static final String PUB_HAS_CITATION = "dc:bibliographicCitation";
public static final String PUB_HAS_DOI = "dc:identifier";
public static final String PUBLICATION_TYPE_ID = "PHENOSCAPE:Publication";
public static final String PHENOSCAPE_PUB_NAMESPACE = "phenoscape_pub";
public static final String ZFIN_PUB_NAMESPACE = "zfin_pub";
public static final String DATASET_TYPE_ID = "cdao:CharacterStateDataMatrix";
public static final String STATE_TYPE_ID = "cdao:CharacterStateDomain";
public static final String CELL_TYPE_ID = "cdao:CharacterStateDatum";
public static final String CHARACTER_TYPE_ID = "cdao:Character";
public static final String OTU_TYPE_ID = "cdao:TU";
public static final String SPECIMEN_TYPE_ID = "PHENOSCAPE:Specimen";
public static final String HAS_PUB_REL_ID = "PHENOSCAPE:has_publication";
public static final String HAS_SPECIMEN_REL_ID = "dwc:individualID";
public static final String HAS_STATE_REL_ID = "cdao:has_Datum";
public static final String REFERS_TO_TAXON_REL_ID = "PHENOSCAPE:has_taxon";
public static final String ANNOTATION_TO_OTU_REL_ID = "PHENOSCAPE:asserted_for_otu";
public static final String HAS_TU_REL_ID = "cdao:has_TU";
public static final String HAS_CHARACTER_REL_ID = "cdao:has_Character";
public static final String HAS_PHENOTYPE_REL_ID = "cdao:has_Phenotype";
public static final String TAXON_PHENOTYPE_REL_ID = "PHENOSCAPE:exhibits";
public static final String CELL_TO_STATE_REL_ID = "cdao:has_State";
public static final String ANNOT_TO_CELL_REL_ID = "PHENOSCAPE:has_source";
public static final String SPECIMEN_TO_COLLECTION_REL_ID = "dwc:collectionID";
public static final String SPECIMEN_TO_CATALOG_ID_REL_ID = "dwc:catalogID";
public static final String HAS_CURATORS_REL_ID = "PHENOSCAPE:has_curators";
public static final String HAS_COMMENT_REL_ID = "PHENOSCAPE:has_comment";
public static final String HAS_NUMBER_REL_ID = "PHENOSCAPE:has_number";
public static final String HAS_COUNT_REL_ID = "PHENOSCAPE:has_count";
public static final String HAS_MEASUREMENT_REL_ID = "PHENOSCAPE:has_measurement";
public static final String HAS_UNIT_REL_ID = "PHENOSCAPE:has_unit";
public static final String GENOTYPE_PHENOTYPE_REL_ID = "OBO_REL:influences";
public static final String GENOTYPE_GENE_REL_ID = "OBO_REL:variant_of";
public static final String GENOTYPE_TYPE_ID = "SO:0001027";
public static final String GENE_TYPE_ID = "SO:0000704";
public static final String MORPHOLINO_OLIGO = "SO:0000034";
public static final String POSITED_BY_REL_ID = "posited_by";
public static final String GENE_NAMESPACE = "zfin_gene";
public static final String FULL_NAME_SYNONYM_CATEGORY = "FULLNAME";
public static final String HAS_DBXREF = "oboInOwl:hasDbXref";
public static final String GENE_TO_CELLULAR_COMPONENT_REL_ID = "OBO_REL:located_in";
public static final String GENE_TO_MOLECULAR_FUNCTION_REL_ID = "OBO_REL:has_function";
public static final String GENE_TO_BIOLOGICAL_PROCESS_REL_ID = "OBO_REL:participates_in";
public static final String ARTICULATED_WITH = "PATO:0002278";
public static final String ASSOCIATED_WITH = "PATO:0001668";
public static final String ATTACHED_TO = "PATO:0001667";
public static final String DETACHED_FROM = "PATO:0001453";
public static final String DISSOCIATED_FROM = "PATO:0001738";
public static final String FUSED_WITH = "PATO:0000642";
public static final String IN_CONTACT_WITH = "PATO:0001961";
public static final String OVERLAP_WITH = "PATO:0001590";
public static final String SEPARATED_FROM = "PATO:0001505";
public static final String UNFUSED_FROM = "PATO:0000651";
public static final String STRUCTURE = "PATO:0000141";
public static final List<String> SYMMETRIC_QUALITIES = Arrays.asList(ARTICULATED_WITH, ASSOCIATED_WITH, ATTACHED_TO, DETACHED_FROM, DISSOCIATED_FROM, FUSED_WITH, IN_CONTACT_WITH, OVERLAP_WITH, SEPARATED_FROM, UNFUSED_FROM, STRUCTURE);
}
|
src/org/phenoscape/obd/loader/Vocab.java
|
package org.phenoscape.obd.loader;
import java.util.Arrays;
import java.util.List;
public class Vocab {
public static final String PUB_HAS_DATE = "dc:date";
public static final String PUB_HAS_ABSTRACT = "dc:abstract";
public static final String PUB_HAS_CITATION = "dc:bibliographicCitation";
public static final String PUB_HAS_DOI = "dc:identifier";
public static final String PUBLICATION_TYPE_ID = "PHENOSCAPE:Publication";
public static final String PHENOSCAPE_PUB_NAMESPACE = "phenoscape_pub";
public static final String ZFIN_PUB_NAMESPACE = "zfin_pub";
public static final String DATASET_TYPE_ID = "cdao:CharacterStateDataMatrix";
public static final String STATE_TYPE_ID = "cdao:CharacterStateDomain";
public static final String CELL_TYPE_ID = "cdao:CharacterStateDatum";
public static final String CHARACTER_TYPE_ID = "cdao:Character";
public static final String OTU_TYPE_ID = "cdao:TU";
public static final String SPECIMEN_TYPE_ID = "PHENOSCAPE:Specimen";
public static final String HAS_PUB_REL_ID = "PHENOSCAPE:has_publication";
public static final String HAS_SPECIMEN_REL_ID = "dwc:individualID";
public static final String HAS_STATE_REL_ID = "cdao:has_Datum";
public static final String REFERS_TO_TAXON_REL_ID = "PHENOSCAPE:has_taxon";
public static final String ANNOTATION_TO_OTU_REL_ID = "PHENOSCAPE:asserted_for_otu";
public static final String HAS_TU_REL_ID = "cdao:has_TU";
public static final String HAS_CHARACTER_REL_ID = "cdao:has_Character";
public static final String HAS_PHENOTYPE_REL_ID = "cdao:has_Phenotype";
public static final String TAXON_PHENOTYPE_REL_ID = "PHENOSCAPE:exhibits";
public static final String CELL_TO_STATE_REL_ID = "cdao:has_State";
public static final String ANNOT_TO_CELL_REL_ID = "PHENOSCAPE:has_source";
public static final String SPECIMEN_TO_COLLECTION_REL_ID = "dwc:collectionID";
public static final String SPECIMEN_TO_CATALOG_ID_REL_ID = "dwc:catalogID";
public static final String HAS_CURATORS_REL_ID = "PHENOSCAPE:has_curators";
public static final String HAS_COMMENT_REL_ID = "PHENOSCAPE:has_comment";
public static final String HAS_NUMBER_REL_ID = "PHENOSCAPE:has_number";
public static final String HAS_COUNT_REL_ID = "PHENOSCAPE:has_count";
public static final String HAS_MEASUREMENT_REL_ID = "PHENOSCAPE:has_measurement";
public static final String HAS_UNIT_REL_ID = "PHENOSCAPE:has_unit";
public static final String GENOTYPE_PHENOTYPE_REL_ID = "OBO_REL:influences";
public static final String GENOTYPE_GENE_REL_ID = "OBO_REL:variant_of";
public static final String GENOTYPE_TYPE_ID = "SO:0001027";
public static final String GENE_TYPE_ID = "SO:0000704";
public static final String MORPHOLINO_OLIGO = "SO:0000034";
public static final String POSITED_BY_REL_ID = "posited_by";
public static final String GENE_NAMESPACE = "zfin_gene";
public static final String FULL_NAME_SYNONYM_CATEGORY = "FULLNAME";
public static final String HAS_DBXREF = "oboInOwl:hasDbXref";
public static final String GENE_TO_CELLULAR_COMPONENT_REL_ID = "OBO_REL:located_in";
public static final String GENE_TO_MOLECULAR_FUNCTION_REL_ID = "OBO_REL:has_function";
public static final String GENE_TO_BIOLOGICAL_PROCESS_REL_ID = "OBO_REL:participates_in";
public static final String ARTICULATED_WITH = "PATO:0002278";
public static final String ASSOCIATED_WITH = "PATO:0001668";
public static final String ATTACHED_TO = "PATO:0001667";
public static final String DETACHED_FROM = "PATO:0001453";
public static final String DISSOCIATED_FROM = "PATO:0001738";
public static final String FUSED_WITH = "PATO:0000642";
public static final String IN_CONTACT_WITH = "PATO:0001961";
public static final String OVERLAP_WITH = "PATO:0001590";
public static final String SEPARATED_FROM = "PATO:0001505";
public static final String UNFUSED_FROM = "PATO:0000651";
public static final List<String> SYMMETRIC_QUALITIES = Arrays.asList(ARTICULATED_WITH, ASSOCIATED_WITH, ATTACHED_TO, DETACHED_FROM, DISSOCIATED_FROM, FUSED_WITH, IN_CONTACT_WITH, OVERLAP_WITH, SEPARATED_FROM, UNFUSED_FROM);
}
|
Added structure to symmetric qualities list.
|
src/org/phenoscape/obd/loader/Vocab.java
|
Added structure to symmetric qualities list.
|
|
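A hedged aside for readers of the Vocab record above: the only substantive change is that STRUCTURE (PATO:0000141) now appears in SYMMETRIC_QUALITIES. The snippet below is a minimal, self-contained sketch of how such a list might be consumed; the SymmetryCheck class and isSymmetric helper are assumptions made for illustration and are not part of the Phenoscape loader.
import java.util.Arrays;
import java.util.List;
// Hypothetical helper: a symmetric relational quality can be asserted in either
// direction between two entities, so a loader might use a membership test like this
// to decide whether to normalize the order of the related entities.
public class SymmetryCheck {
    // Small subset of the Vocab constants above, copied here to keep the sketch runnable on its own.
    private static final String FUSED_WITH = "PATO:0000642";
    private static final String STRUCTURE = "PATO:0000141";
    private static final List<String> SYMMETRIC_QUALITIES = Arrays.asList(FUSED_WITH, STRUCTURE);
    public static boolean isSymmetric(String qualityID) {
        return SYMMETRIC_QUALITIES.contains(qualityID);
    }
    public static void main(String[] args) {
        System.out.println(isSymmetric("PATO:0000141")); // true: included by the commit above
        System.out.println(isSymmetric("PATO:0001999")); // false: not in this sketch's list
    }
}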
Java
|
mit
|
24fef570edf6ac333fa5708c8ca5994bc5db7e93
| 0
|
SoftwareEngineeringToolDemos/FSE-2011-Crystal,KaranDagar/crystalvc,KaranDagar/crystalvc,brunyuriy/crystalvc,SoftwareEngineeringToolDemos/FSE-2011-Crystal,KaranDagar/FSE-2011-Crystal---old,KaranDagar/FSE-2011-Crystal---old,brunyuriy/crystalvc,SoftwareEngineeringToolDemos/FSE-2011-Crystal
|
package crystal.client;
import java.awt.AWTException;
import java.awt.Image;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import javax.swing.Timer;
import org.apache.log4j.Logger;
import crystal.Constants;
import crystal.client.ConflictDaemon.ComputationListener;
import crystal.model.LocalStateResult;
import crystal.model.Relationship;
import crystal.util.LSMRLogger;
import crystal.util.RunIt;
import crystal.util.TimeUtility;
/**
* The system tray icon UI. (This lives in the title bar in OS X or somewhere else in Linux). This UI contains a few menu options and allows opening
* up the larger window UI. If the system tray is not supported, the UI switches to a window-only view.
*
* ConflictSystemTray is a singleton.
*
* @author rtholmes
* @author brun
*/
public class ConflictSystemTray implements ComputationListener {
// The singleton instance.
private static ConflictSystemTray _instance;
// The boolean that tells us if the OS supports the system tray.
public static boolean TRAY_SUPPORTED = SystemTray.isSupported();
// The current Crystal version number.
public static String VERSION_ID = "0.1.20110407";
// A pointer to the Crystal window UI.
private ConflictClient _client;
// The logger.
private Logger _log = Logger.getLogger(this.getClass());
// The current configuration.
private ClientPreferences _prefs;
// A timer that we use to refresh the results.
private Timer _timer;
// A placekeeper to remember when we start each calculation.
long startCalculations = 0L;
// A handle on the actual system tray.
final private SystemTray _tray;
// The Crystal tray icon.
final private TrayIcon _trayIcon;
// A menu element that dictates whether the Crystal ConflictDaemon is running (refreshing).
private MenuItem daemonEnabledItem;
// A menu element that allows the user to start a new refresh right now.
private MenuItem refreshItem;
// The other menu elements are not referenced from listeners, so they are declared only locally.
/**
* Constructs a brand new Crystal system tray icon, if the OS allows it. If the OS does not allow it, creates an empty tray icon object holding
* some nulls.
*/
private ConflictSystemTray() {
_log.info("ConflictSystemTray - started at: " + TimeUtility.getCurrentLSMRDateString());
if (TRAY_SUPPORTED) {
_tray = SystemTray.getSystemTray();
// _trayIcon = new TrayIcon((new ImageIcon(Constants.class.getResource("/crystal/client/images/bulb.gif"))).getImage());
_trayIcon = new TrayIcon((new ImageIcon(Constants.class.getResource("/crystal/client/images/crystal-ball_blue_32.png"))).getImage());
} else {
_tray = null;
_trayIcon = null;
}
}
/**
* A listener on the about menu item. When the user clicks on "about", a dialog pops up with some info on Crystal.
*/
public void aboutAction() {
JOptionPane
.showMessageDialog(
null,
"Crystal version: "
+ VERSION_ID
+ "\nBuilt by Reid Holmes and Yuriy Brun. Contact brun@cs.washington.edu.\nhttp://www.cs.washington.edu/homes/brun/research/crystal",
"Crystal: Proactive Conflict Detector for Distributed Version Control", JOptionPane.PLAIN_MESSAGE, new ImageIcon(
Constants.class.getResource("/crystal/client/images/crystal-ball_blue_128.png")));
}
/**
* Creates the Crystal system tray icon and installs it in the tray.
*/
private void createAndShowGUI() {
// Create components for a popup menu to be used if the System Tray is supported.
MenuItem aboutItem = new MenuItem("About");
MenuItem preferencesItem = new MenuItem("Edit Configuration");
daemonEnabledItem = new MenuItem("Disable Daemon");
refreshItem = new MenuItem("Refresh");
final MenuItem showClientItem = new MenuItem("Show Client");
MenuItem exitItem = new MenuItem("Exit");
try {
_prefs = ClientPreferences.loadPreferencesFromXML();
if (_prefs != null) {
_log.info("Preferences loaded successfully.");
} else {
String msg = "Error loading preferences.";
System.err.println(msg);
_log.error(msg);
}
} catch (Exception e) {
// e.printStackTrace();
String msg = "Error initializing ConflictClient. Please update your preference file ( " + ClientPreferences.CONFIG_PATH + " )";
System.err.println(msg);
_log.error(msg);
System.err.println(e.getMessage());
_log.error(e.getMessage());
String dialogMessage = "The preferences file ( "
+ ClientPreferences.CONFIG_PATH
+ " ) is invalid and could not be loaded:\n > > > "
+ e.getMessage()
+ "\n"
+ "Do you want to edit it using the GUI? This may overwrite your previous configuration file. Your alternative is to edit the .xml file directly.";
int answer = JOptionPane.showConfirmDialog(null, dialogMessage, "Invalid configuration file", JOptionPane.YES_NO_OPTION,
JOptionPane.WARNING_MESSAGE);
if (answer == JOptionPane.YES_OPTION) {
_prefs = ClientPreferences.DEFAULT_CLIENT_PREFERENCES;
PreferencesGUIEditorFrame editorFrame = PreferencesGUIEditorFrame.getPreferencesGUIEditorFrame(_prefs);
JOptionPane.showMessageDialog(editorFrame, "Please remember to restart the client after closing the configuration editor.");
// and disable client
daemonEnabledItem.setLabel("Enable Daemon");
if (_timer != null) {
_timer.stop();
_timer = null;
}
// for (CalculateTask ct : tasks) {
// _log.info("disabling ct of state: " + ct.getState());
// ct.cancel(true);
// }
} else { // answer == JOptionPane.NO_OPTION
System.out.println("User decided to edit the configuration file by hand");
_log.trace("User decided to edit the configuration file by hand");
quit(0);
}
}
try {
if (_prefs.hasChanged()) {
ClientPreferences.savePreferencesToDefaultXML(_prefs);
_prefs.setChanged(false);
}
} catch (Exception e) {
_log.error("Could not write to the configuration file: " + e.getMessage());
}
// Check that we have a recent-enough version of hg
try {
if (!(RunIt.validHG(Constants.MIN_HG_VERSION, _prefs.getHgPath(), _prefs.getTempDirectory()))) {
JOptionPane.showMessageDialog(null,
"Your computer is running an outdated version of hg.\nYou must be running at least version " + Constants.MIN_HG_VERSION,
"outdated hg", JOptionPane.ERROR_MESSAGE);
quit(1);
}
} catch (IOException e) {
JOptionPane.showMessageDialog(null, "Encountered an exception while checking hg version",
"Error checking hg version", JOptionPane.ERROR_MESSAGE);
quit(1);
}
// Start out with the client showing.
showClient();
if (TRAY_SUPPORTED) {
final PopupMenu trayMenu = new PopupMenu();
_trayIcon.setImage((new ImageIcon(Constants.class.getResource("/crystal/client/images/16X16/must/clock.png"))).getImage());
_trayIcon.setToolTip("Crystal");
// Add components to the popup menu
trayMenu.add(aboutItem);
trayMenu.addSeparator();
trayMenu.add(preferencesItem);
trayMenu.add(daemonEnabledItem);
trayMenu.addSeparator();
trayMenu.add(refreshItem);
trayMenu.addSeparator();
trayMenu.add(showClientItem);
trayMenu.addSeparator();
trayMenu.add(exitItem);
_trayIcon.setPopupMenu(trayMenu);
try {
_tray.add(_trayIcon);
} catch (AWTException e) {
_log.error("TrayIcon could not be added.");
return;
}
_trayIcon.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
_log.trace("Tray icon ActionEvent: " + ae.getActionCommand());
// doesn't work on OS X; it doesn't register double clicks on the tray
showClient();
}
});
aboutItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
aboutAction();
}
});
refreshItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
_log.info("Refresh manually selected.");
performCalculations();
}
});
preferencesItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
preferencesAction();
}
});
showClientItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
showClient();
}
});
daemonEnabledItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
daemonAbleAction();
}
});
exitItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
exitAction();
}
});
ConflictDaemon.getInstance().addListener(this);
}
performCalculations();
}
/**
* Creates and starts a new timer (throws away the old one). The timer fires an update every refresh seconds, unless there is a pending task.
*/
private void createTimer() {
// note that the timer works in milliseconds and the argument is in seconds
boolean pTask = false;
// check if anything is PENDING (first local states, then relationships)
for (LocalStateResult localState : ConflictDaemon.getInstance().getLocalStates()) {
if (localState.getLocalState().equals(LocalStateResult.PENDING)) {
pTask = true;
}
}
for (Relationship relationship : ConflictDaemon.getInstance().getRelationships()) {
if (!(relationship.isReady())) {
pTask = true;
}
}
final boolean pendingTask = pTask;
if (_timer != null) {
_timer.stop();
_timer = null;
}
_timer = new Timer((int) ClientPreferences.REFRESH * 1000, new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
_log.info("Timer fired at: " + TimeUtility.getCurrentLSMRDateString());
if (!pendingTask) {
// if tasks are pending don't start the calculations again
performCalculations();
}
}
});
_timer.setInitialDelay((int) ClientPreferences.REFRESH * 1000);
_timer.start();
long nextFire = System.currentTimeMillis() + _timer.getDelay();
_log.info("Timer created - will fire in: " + TimeUtility.msToHumanReadable(_timer.getInitialDelay()) + " (@ "
+ new SimpleDateFormat("HH:mm:ss").format(new Date(nextFire)) + ")");
}
/**
* A listener for clicking the menu item that enables or disables the daemon.
*/
public void daemonAbleAction() {
if (daemonEnabledItem.getLabel().equals("Enable Daemon")) {
// daemon enabled
_log.info("ConflictDaemon enabled");
daemonEnabledItem.setLabel("Disable Daemon");
_client.setDaemonEnabled(true);
if (_timer != null) {
// do it
_timer.start();
} else {
createTimer();
}
} else {
// daemon disabled
_log.info("ConflictDaemon disabled");
daemonEnabledItem.setLabel("Enable Daemon");
_client.setDaemonEnabled(false);
if (_timer != null) {
_timer.stop();
_timer = null;
}
// for (CalculateTask ct : tasks) {
// _log.info("disabling ct of state: " + ct.getState());
// ct.cancel(true);
// }
update();
}
}
/**
* A listener for clicking the menu to exit.
*/
public void exitAction() {
if (TRAY_SUPPORTED)
_tray.remove(_trayIcon);
String msg = "ConflictClient exited successfully.";
System.out.println(msg);
_log.trace("Exit action selected");
quit(0);
}
/**
* If the daemon is not running, does nothing. If the daemon is running, creates a new executor and performs the calculations on all repos of all
* projects of the current configuration.
*/
public void performCalculations() {
// if the daemon is disabled, don't perform calculations.
if (daemonEnabledItem.getLabel().equals("Enable Daemon")) {
return;
}
// if the daemon is enabled.
// # lines marked with //# are removed to simplify the execution process
// # Executor ex = new SerialExecutor();
refreshItem.setLabel("Refreshing...");
_log.trace("refresh text: " + refreshItem.getLabel());
refreshItem.setEnabled(false);
_client.setCanUpdate(false);
startCalculations = System.currentTimeMillis();
// for (ProjectPreferences projPref : _prefs.getProjectPreference()) {
// final CalculateLocalStateTask clst = new CalculateLocalStateTask(projPref, this, _client);
// ex.execute(clst);
//
// for (final DataSource source : projPref.getDataSources()) {
// final CalculateRelationshipTask crt = new CalculateRelationshipTask(source, projPref, this, _client);
// ex.execute(crt);
// }
// }
for (ProjectPreferences projPref : _prefs.getProjectPreference()) {
final CalculateProjectTask cpt = new CalculateProjectTask(projPref, this, _client);
// # ex.execute(cpt);
cpt.execute();
}
}
/**
* Either creates a new one (if one did not exist) or displays the existing GUI configuration editor.
*/
public void preferencesAction() {
PreferencesGUIEditorFrame.getPreferencesGUIEditorFrame(_prefs);
}
/**
* Quit Crystal with a status.
*
* @param status
* : the exit status (0 means normal).
*/
private void quit(int status) {
_log.info("ConflictSystemTray exited - code: " + status + " at: " + TimeUtility.getCurrentLSMRDateString());
System.exit(status);
}
/**
* Show the client and set up the timer.
*/
private void showClient() {
_log.info("Show client requested");
if (_client != null) {
_client.show();
} else {
_client = new ConflictClient();
_client.createAndShowGUI(_prefs);
}
}
/**
* Updates the images and tool tips of all the projects and all the repositories within the current configuration.
*/
@Override
public void update() {
_log.trace("ConflictSystemTray::update()");
// _log.trace("Task size in update: " + tasks.size());
// check if anything is PENDING (first local states, then relationships)
boolean pendingTask = false;
for (LocalStateResult localState : ConflictDaemon.getInstance().getLocalStates()) {
if (localState.getLocalState().equals(LocalStateResult.PENDING)) {
pendingTask = true;
}
}
for (Relationship relationship : ConflictDaemon.getInstance().getRelationships()) {
if (relationship.getName().equals(Relationship.PENDING)) {
pendingTask = true;
}
}
if (pendingTask) {
_log.trace("Update called with tasks still pending.");
// keep the UI in updating mode
refreshItem.setLabel("Refreshing...");
refreshItem.setEnabled(false);
_client.setCanUpdate(false);
} else {
_log.trace("Update called with no tasks pending.");
createTimer();
refreshItem.setLabel("Refresh");
refreshItem.setEnabled(true);
_client.setCanUpdate(true);
}
if (TRAY_SUPPORTED)
updateTrayIcon();
if (_client != null) {
_client.update();
}
}
/**
* Updates the tray icon image to the harshest relationship in the current configuration.
*/
private void updateTrayIcon() {
if (!TRAY_SUPPORTED)
return;
_trayIcon.getImage().flush();
Image icon = Relationship.getDominant(ConflictDaemon.getInstance().getRelationships());
_trayIcon.setImage(icon);
}
/**
* @return the single instance of ConflictSystemTray
*/
public static ConflictSystemTray getInstance() {
if (_instance == null) {
_instance = new ConflictSystemTray();
}
return _instance;
}
/**
* Main execution point that starts Crystal.
*
* @param args
* : --version : Prints the version number.
*/
public static void main(String[] args) {
if (args.length > 0) {
if (args[0].equals("--version")) {
System.out.println("Crystal version: " + VERSION_ID);
System.exit(0);
}
}
ConflictSystemTray.startLogging();
// UIManager.put("swing.boldMetal", Boolean.FALSE);
ConflictSystemTray cst = ConflictSystemTray.getInstance();
cst.createAndShowGUI();
}
public static void startLogging() {
LSMRLogger.startLog4J(Constants.QUIET_CONSOLE, true, Constants.LOG_LEVEL, System.getProperty("user.home"), ".conflictClientLog");
}
}
|
src/crystal/client/ConflictSystemTray.java
|
package crystal.client;
import java.awt.AWTException;
import java.awt.Image;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import javax.swing.Timer;
import org.apache.log4j.Logger;
import crystal.Constants;
import crystal.client.ConflictDaemon.ComputationListener;
import crystal.model.LocalStateResult;
import crystal.model.Relationship;
import crystal.util.LSMRLogger;
import crystal.util.RunIt;
import crystal.util.TimeUtility;
/**
* The system tray icon UI. (This lives in the title bar in OS X or somewhere else in Linux). This UI contains a few menu options and allows opening
* up the larger window UI. If the system tray is not supported, the UI switches to a window-only view.
*
* ConflictSystemTray is a singleton.
*
* @author rtholmes
* @author brun
*/
public class ConflictSystemTray implements ComputationListener {
// The singleton instance.
private static ConflictSystemTray _instance;
// The boolean that tells us if the OS supports the system tray.
public static boolean TRAY_SUPPORTED = SystemTray.isSupported();
// The current Crystal version number.
public static String VERSION_ID = "0.1.20110403";
// A pointer to the Crystal window UI.
private ConflictClient _client;
// The logger.
private Logger _log = Logger.getLogger(this.getClass());
// The current configuration.
private ClientPreferences _prefs;
// A timer that we use to refresh the results.
private Timer _timer;
// A placekeeper to remember when we start each calculation.
long startCalculations = 0L;
// A handle on the actual system tray.
final private SystemTray _tray;
// The Crystal tray icon.
final private TrayIcon _trayIcon;
// A menu element that dictates whether the Crystal ConflictDaemon is running (refreshing).
private MenuItem daemonEnabledItem;
// A menu element that allows the user to start a new refresh right now.
private MenuItem refreshItem;
// The other menu elements are not referenced from listeners, so they are declared only locally.
/**
* Constructs a brand new Crystal system tray icon, if the OS allows it. If the OS does not allow it, creates an empty tray icon object holding
* some nulls.
*/
private ConflictSystemTray() {
_log.info("ConflictSystemTray - started at: " + TimeUtility.getCurrentLSMRDateString());
if (TRAY_SUPPORTED) {
_tray = SystemTray.getSystemTray();
// _trayIcon = new TrayIcon((new ImageIcon(Constants.class.getResource("/crystal/client/images/bulb.gif"))).getImage());
_trayIcon = new TrayIcon((new ImageIcon(Constants.class.getResource("/crystal/client/images/crystal-ball_blue_32.png"))).getImage());
} else {
_tray = null;
_trayIcon = null;
}
}
/**
* A listener on the about menu item. When the user clicks on "about", a dialog pops up with some info on Crystal.
*/
public void aboutAction() {
JOptionPane
.showMessageDialog(
null,
"Crystal version: "
+ VERSION_ID
+ "\nBuilt by Reid Holmes and Yuriy Brun. Contact brun@cs.washington.edu.\nhttp://www.cs.washington.edu/homes/brun/research/crystal",
"Crystal: Proactive Conflict Detector for Distributed Version Control", JOptionPane.PLAIN_MESSAGE, new ImageIcon(
Constants.class.getResource("/crystal/client/images/crystal-ball_blue_128.png")));
}
/**
* Creates the Crystal system tray icon and installs it in the tray.
*/
private void createAndShowGUI() {
// Create components for a popup menu to be used if the System Tray is supported.
MenuItem aboutItem = new MenuItem("About");
MenuItem preferencesItem = new MenuItem("Edit Configuration");
daemonEnabledItem = new MenuItem("Disable Daemon");
refreshItem = new MenuItem("Refresh");
final MenuItem showClientItem = new MenuItem("Show Client");
MenuItem exitItem = new MenuItem("Exit");
try {
_prefs = ClientPreferences.loadPreferencesFromXML();
if (_prefs != null) {
_log.info("Preferences loaded successfully.");
} else {
String msg = "Error loading preferences.";
System.err.println(msg);
_log.error(msg);
}
} catch (Exception e) {
// e.printStackTrace();
String msg = "Error initializing ConflictClient. Please update your preference file ( " + ClientPreferences.CONFIG_PATH + " )";
System.err.println(msg);
_log.error(msg);
System.err.println(e.getMessage());
_log.error(e.getMessage());
String dialogMessage = "The preferences file ( "
+ ClientPreferences.CONFIG_PATH
+ " ) is invalid and could not be loaded:\n > > > "
+ e.getMessage()
+ "\n"
+ "Do you want to edit it using the GUI? This may overwrite your previous configuration file. Your alternative is to edit the .xml file directly.";
int answer = JOptionPane.showConfirmDialog(null, dialogMessage, "Invalid configuration file", JOptionPane.YES_NO_OPTION,
JOptionPane.WARNING_MESSAGE);
if (answer == JOptionPane.YES_OPTION) {
_prefs = ClientPreferences.DEFAULT_CLIENT_PREFERENCES;
PreferencesGUIEditorFrame editorFrame = PreferencesGUIEditorFrame.getPreferencesGUIEditorFrame(_prefs);
JOptionPane.showMessageDialog(editorFrame, "Please remember to restart the client after closing the configuration editor.");
// and disable client
daemonEnabledItem.setLabel("Enable Daemon");
if (_timer != null) {
_timer.stop();
_timer = null;
}
// for (CalculateTask ct : tasks) {
// _log.info("disabling ct of state: " + ct.getState());
// ct.cancel(true);
// }
} else { // answer == JOptionPane.NO_OPTION
System.out.println("User decided to edit the configuration file by hand");
_log.trace("User decided to edit the configuration file by hand");
quit(0);
}
}
try {
if (_prefs.hasChanged()) {
ClientPreferences.savePreferencesToDefaultXML(_prefs);
_prefs.setChanged(false);
}
} catch (Exception e) {
_log.error("Could not write to the configuration file: " + e.getMessage());
}
// Check that we have a recent-enough version of hg
try {
if (!(RunIt.validHG(Constants.MIN_HG_VERSION, _prefs.getHgPath(), _prefs.getTempDirectory()))) {
JOptionPane.showMessageDialog(null,
"Your computer is running an outdated version of hg.\nYou must be running at least version " + Constants.MIN_HG_VERSION,
"outdated hg", JOptionPane.ERROR_MESSAGE);
quit(1);
}
} catch (IOException e) {
JOptionPane.showMessageDialog(null, "Encountered an exception while checking hg version",
"Error checking hg version", JOptionPane.ERROR_MESSAGE);
quit(1);
}
// Start out with the client showing.
showClient();
if (TRAY_SUPPORTED) {
final PopupMenu trayMenu = new PopupMenu();
_trayIcon.setImage((new ImageIcon(Constants.class.getResource("/crystal/client/images/16X16/must/clock.png"))).getImage());
_trayIcon.setToolTip("Crystal");
// Add components to the popup menu
trayMenu.add(aboutItem);
trayMenu.addSeparator();
trayMenu.add(preferencesItem);
trayMenu.add(daemonEnabledItem);
trayMenu.addSeparator();
trayMenu.add(refreshItem);
trayMenu.addSeparator();
trayMenu.add(showClientItem);
trayMenu.addSeparator();
trayMenu.add(exitItem);
_trayIcon.setPopupMenu(trayMenu);
try {
_tray.add(_trayIcon);
} catch (AWTException e) {
_log.error("TrayIcon could not be added.");
return;
}
_trayIcon.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
_log.trace("Tray icon ActionEvent: " + ae.getActionCommand());
// doesn't work on OS X; it doesn't register double clicks on the tray
showClient();
}
});
aboutItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
aboutAction();
}
});
refreshItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
_log.info("Refresh manually selected.");
performCalculations();
}
});
preferencesItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
preferencesAction();
}
});
showClientItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
showClient();
}
});
daemonEnabledItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
daemonAbleAction();
}
});
exitItem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
exitAction();
}
});
ConflictDaemon.getInstance().addListener(this);
}
performCalculations();
}
/**
* Creates and starts a new timer (throws away the old one). The timer fires an update every refresh seconds, unless there is a pending task.
*/
private void createTimer() {
// note that the timer works in milliseconds and the argument is in seconds
boolean pTask = false;
// check if anything is PENDING (first local states, then relationships)
for (LocalStateResult localState : ConflictDaemon.getInstance().getLocalStates()) {
if (localState.getLocalState().equals(LocalStateResult.PENDING)) {
pTask = true;
}
}
for (Relationship relationship : ConflictDaemon.getInstance().getRelationships()) {
if (!(relationship.isReady())) {
pTask = true;
}
}
final boolean pendingTask = pTask;
if (_timer != null) {
_timer.stop();
_timer = null;
}
_timer = new Timer((int) ClientPreferences.REFRESH * 1000, new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
_log.info("Timer fired at: " + TimeUtility.getCurrentLSMRDateString());
if (!pendingTask) {
// if tasks are pending don't start the calculations again
performCalculations();
}
}
});
_timer.setInitialDelay((int) ClientPreferences.REFRESH * 1000);
_timer.start();
long nextFire = System.currentTimeMillis() + _timer.getDelay();
_log.info("Timer created - will fire in: " + TimeUtility.msToHumanReadable(_timer.getInitialDelay()) + " (@ "
+ new SimpleDateFormat("HH:mm:ss").format(new Date(nextFire)) + ")");
}
/**
* A listener for clicking the menu item that enables or disables the daemon.
*/
public void daemonAbleAction() {
if (daemonEnabledItem.getLabel().equals("Enable Daemon")) {
// daemon enabled
_log.info("ConflictDaemon enabled");
daemonEnabledItem.setLabel("Disable Daemon");
_client.setDaemonEnabled(true);
if (_timer != null) {
// do it
_timer.start();
} else {
createTimer();
}
} else {
// daemon disabled
_log.info("ConflictDaemon disabled");
daemonEnabledItem.setLabel("Enable Daemon");
_client.setDaemonEnabled(false);
if (_timer != null) {
_timer.stop();
_timer = null;
}
// for (CalculateTask ct : tasks) {
// _log.info("disabling ct of state: " + ct.getState());
// ct.cancel(true);
// }
update();
}
}
/**
* A listener for clicking the menu to exit.
*/
public void exitAction() {
if (TRAY_SUPPORTED)
_tray.remove(_trayIcon);
String msg = "ConflictClient exited successfully.";
System.out.println(msg);
_log.trace("Exit action selected");
quit(0);
}
/**
* If the daemon is not running, does nothing. If the daemon is running, creates a new executor and performs the calculations on all repos of all
* projects of the current configuration.
*/
public void performCalculations() {
// if the daemon is disabled, don't perform calculations.
if (daemonEnabledItem.getLabel().equals("Enable Daemon")) {
return;
}
// if the daemon is enabled.
// # lines marked with //# are removed to simplify the execution process
// # Executor ex = new SerialExecutor();
refreshItem.setLabel("Refreshing...");
_log.trace("refresh text: " + refreshItem.getLabel());
refreshItem.setEnabled(false);
_client.setCanUpdate(false);
startCalculations = System.currentTimeMillis();
// for (ProjectPreferences projPref : _prefs.getProjectPreference()) {
// final CalculateLocalStateTask clst = new CalculateLocalStateTask(projPref, this, _client);
// ex.execute(clst);
//
// for (final DataSource source : projPref.getDataSources()) {
// final CalculateRelationshipTask crt = new CalculateRelationshipTask(source, projPref, this, _client);
// ex.execute(crt);
// }
// }
for (ProjectPreferences projPref : _prefs.getProjectPreference()) {
final CalculateProjectTask cpt = new CalculateProjectTask(projPref, this, _client);
// # ex.execute(cpt);
cpt.execute();
}
}
/**
* Either creates a new one (if one did not exist) or displays the existing GUI configuration editor.
*/
public void preferencesAction() {
PreferencesGUIEditorFrame.getPreferencesGUIEditorFrame(_prefs);
}
/**
* Quit Crystal with a status.
*
* @param status
* : the exit status (0 means normal).
*/
private void quit(int status) {
_log.info("ConflictSystemTray exited - code: " + status + " at: " + TimeUtility.getCurrentLSMRDateString());
System.exit(status);
}
/**
* Show the client and set up the timer.
*/
private void showClient() {
_log.info("Show client requested");
if (_client != null) {
_client.show();
} else {
_client = new ConflictClient();
_client.createAndShowGUI(_prefs);
}
}
/**
* Updates the images and tool tips of all the projects and all the repositories within the current configuration.
*/
@Override
public void update() {
_log.trace("ConflictSystemTray::update()");
// _log.trace("Task size in update: " + tasks.size());
// check if anything is PENDING (first local states, then relationships)
boolean pendingTask = false;
for (LocalStateResult localState : ConflictDaemon.getInstance().getLocalStates()) {
if (localState.getLocalState().equals(LocalStateResult.PENDING)) {
pendingTask = true;
}
}
for (Relationship relationship : ConflictDaemon.getInstance().getRelationships()) {
if (relationship.getName().equals(Relationship.PENDING)) {
pendingTask = true;
}
}
if (pendingTask) {
_log.trace("Update called with tasks still pending.");
// keep the UI in updating mode
refreshItem.setLabel("Refreshing...");
refreshItem.setEnabled(false);
_client.setCanUpdate(false);
} else {
_log.trace("Update called with no tasks pending.");
createTimer();
refreshItem.setLabel("Refresh");
refreshItem.setEnabled(true);
_client.setCanUpdate(true);
}
if (TRAY_SUPPORTED)
updateTrayIcon();
if (_client != null) {
_client.update();
}
}
/**
* Updates the tray icon image to the harshest relationship in the current configuration.
*/
private void updateTrayIcon() {
if (!TRAY_SUPPORTED)
return;
_trayIcon.getImage().flush();
Image icon = Relationship.getDominant(ConflictDaemon.getInstance().getRelationships());
_trayIcon.setImage(icon);
}
/**
* @return the single instance of ConflictSystemTray
*/
public static ConflictSystemTray getInstance() {
if (_instance == null) {
_instance = new ConflictSystemTray();
}
return _instance;
}
/**
* Main execution point that starts Crystal.
*
* @param args
* : --version : Prints the version number.
*/
public static void main(String[] args) {
if (args.length > 0) {
if (args[0].equals("--version")) {
System.out.println("Crystal version: " + VERSION_ID);
System.exit(0);
}
}
ConflictSystemTray.startLogging();
// UIManager.put("swing.boldMetal", Boolean.FALSE);
ConflictSystemTray cst = ConflictSystemTray.getInstance();
cst.createAndShowGUI();
}
public static void startLogging() {
LSMRLogger.startLog4J(Constants.QUIET_CONSOLE, true, Constants.LOG_LEVEL, System.getProperty("user.home"), ".conflictClientLog");
}
}
|
Updated the version number.
|
src/crystal/client/ConflictSystemTray.java
|
Updated the version number.
|
|
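The ConflictSystemTray record above changes only VERSION_ID (0.1.20110403 to 0.1.20110407), but its javadoc describes a pattern worth a small illustration: guard every tray operation behind SystemTray.isSupported() and fall back to a window-only view otherwise. The sketch below is a self-contained approximation of that guard, assuming nothing about Crystal's actual classes; the TrayGuardSketch name and the blank icon are invented for the example.
import java.awt.AWTException;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.image.BufferedImage;
// Minimal "tray if supported, window-only otherwise" guard, mirroring the constructor logic above.
public class TrayGuardSketch {
    private final TrayIcon trayIcon;
    public TrayGuardSketch() {
        if (SystemTray.isSupported()) {
            // A real application would load an icon resource; a blank 16x16 image keeps the sketch self-contained.
            BufferedImage image = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB);
            trayIcon = new TrayIcon(image, "Sketch");
            trayIcon.setPopupMenu(new PopupMenu());
            try {
                SystemTray.getSystemTray().add(trayIcon);
            } catch (AWTException e) {
                System.err.println("TrayIcon could not be added: " + e.getMessage());
            }
        } else {
            trayIcon = null; // no tray available: a real UI would switch to a window-only view here
        }
    }
    public static void main(String[] args) {
        new TrayGuardSketch();
        System.out.println("Tray supported: " + SystemTray.isSupported());
    }
}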
Java
|
mit
|
49fe483a3f36660895c4316c00cfd23b839cc5c7
| 0
|
CMPUT301F17T14/gitrekt
|
package com.example.habitrack;
import android.content.Context;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Calendar;
/**
* Created by sshussai on 11/29/17.
*/
/**
* Steps to add a new object:
* 1. Declare a base object
* 2. Declare a public final MODE value
* 3. Declare a private final FILENAME value
* 4. Add the if case to the save function. Modify as needed
* 5. Add the if case to the load function. Modify as needed
*/
public class FileManager {
private Context ctx;
// 1. Base objects
private ArrayList<HabitTypeMetadata> habitTypeMetadata;
private ArrayList<HabitEvent> recentHabitEvents;
private ArrayList<HabitEvent> todayHabitEvents;
// 2. MODES
public final Integer HT_METADATA_MODE = 100;
public final Integer RECENT_HE_MODE = 200;
public final Integer TODAY_HE_MODE = 300;
// 3. FILENAMES
private String filename;
private final String HT_METADATA_FILE = "htmetadata.sav";
private final String RECENT_HE_FILE = "recenthabitevents.sav";
private final String TODAY_HE_FILE = "todayhabitevents.sav";
// Constructor
public FileManager(Context context) {
this.ctx = context;
}
public void save(Integer mode){
// 4. If cases for the save function
if(mode == HT_METADATA_MODE){
habitTypeMetadata = HabitTypeStateManager.getHTStateManager().getAllMetadata();
filename = HT_METADATA_FILE;
} else if(mode == RECENT_HE_MODE){
recentHabitEvents = HabitEventStateManager.getHEStateManager().getRecentHabitevents();
filename = RECENT_HE_FILE;
} else if(mode == TODAY_HE_MODE){
todayHabitEvents = HabitEventStateManager.getHEStateManager().getTodayHabitevents();
filename = TODAY_HE_FILE;
}
try {
FileOutputStream fos = ctx.openFileOutput(filename,0);
OutputStreamWriter writer = new OutputStreamWriter(fos);
Gson gson = new Gson();
if(mode == HT_METADATA_MODE) {
gson.toJson(habitTypeMetadata, writer);
} else if (mode == RECENT_HE_MODE){
gson.toJson(recentHabitEvents, writer);
} else if(mode == TODAY_HE_MODE){
gson.toJson(todayHabitEvents, writer);
}
writer.flush();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
throw new RuntimeException();
} catch (IOException e) {
// TODO Auto-generated catch block
throw new RuntimeException();
}
}
public void load(Integer mode) {
// 5. If cases for load function
if(mode == HT_METADATA_MODE){
filename = HT_METADATA_FILE;
} else if(mode == RECENT_HE_MODE){
filename = RECENT_HE_FILE;
} else if(mode == TODAY_HE_MODE){
filename = TODAY_HE_FILE;
}
try {
FileInputStream fis = ctx.openFileInput(filename);
BufferedReader in = new BufferedReader(new InputStreamReader(fis));
Gson gson = new Gson();
//Code taken from http://stackoverflow.com/questions/12384064/gson-convert-from-json-to-a-typed-arraylistt Sept.22,2016
if(mode == HT_METADATA_MODE) {
Type calType = new TypeToken<ArrayList<HabitTypeMetadata>>() {}.getType();
habitTypeMetadata = gson.fromJson(in, calType);
} else if (mode == RECENT_HE_MODE){
Type calType = new TypeToken<ArrayList<HabitEvent>>() {}.getType();
recentHabitEvents = gson.fromJson(in, calType);
} else if (mode == TODAY_HE_MODE){
Type calType = new TypeToken<ArrayList<HabitEvent>>() {}.getType();
todayHabitEvents = gson.fromJson(in, calType);
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
if(mode == HT_METADATA_MODE){
habitTypeMetadata = new ArrayList<HabitTypeMetadata>();
} else if(mode == RECENT_HE_MODE){
recentHabitEvents = new ArrayList<HabitEvent>();
} else if(mode == TODAY_HE_MODE){
todayHabitEvents = new ArrayList<HabitEvent>();
}
} catch (IOException e) {
// TODO Auto-generated catch block
throw new RuntimeException();
}
if(mode == HT_METADATA_MODE){
HabitTypeStateManager.getHTStateManager().setHtMetadata(habitTypeMetadata);
} else if(mode == RECENT_HE_MODE){
HabitEventStateManager.getHEStateManager().setRecentHabitEvents(recentHabitEvents);
} else if(mode == TODAY_HE_MODE){
HabitEventStateManager.getHEStateManager().setTodayHabitEvents(todayHabitEvents);
}
}
}
|
HabiTrack/app/src/main/java/com/example/habitrack/FileManager.java
|
package com.example.habitrack;
import android.content.Context;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Calendar;
/**
* Created by sshussai on 11/29/17.
*/
/**
* Steps to add a new object:
* 1. Declare a base object
* 2. Declare a public final MODE value
* 3. Declare a private final FILENAME value
* 4. Add the if case to the save function. Modify as needed
* 5. Add the if case to the load function. Modify as needed
*/
public class FileManager {
private Context ctx;
// 1. Base objects
private ArrayList<HabitTypeMetadata> habitTypeMetadata;
private ArrayList<HabitEvent> recentHabitEvents;
private ArrayList<HabitEvent> todayHabitEvents;
// 2. MODES
public final Integer HT_METADATA_MODE = 100;
public final Integer RECENT_HE_MODE = 200;
public final Integer TODAY_HE_MODE = 300;
// 3. FILENAMES
private String filename;
private final String HT_METADATA_FILE = "htmetadata.sav";
private final String RECENT_HE_FILE = "recenthabitevents.sav";
private final String TODAY_HE_FILE = "todayhabitevents.sav";
// Constructor
public FileManager(Context context) {
this.ctx = context;
}
public void save(Integer mode){
// 4. If cases for the save function
if(mode == HT_METADATA_MODE){
habitTypeMetadata = HabitTypeStateManager.getHTStateManager().getAllMetadata();
filename = HT_METADATA_FILE;
} else if(mode == RECENT_HE_MODE){
recentHabitEvents = HabitEventStateManager.getHEStateManager().getRecentHabitevents();
filename = RECENT_HE_FILE;
} else if(mode == TODAY_HE_MODE){
todayHabitEvents = HabitEventStateManager.getHEStateManager().getTodayHabitevents();
}
try {
FileOutputStream fos = ctx.openFileOutput(filename,0);
OutputStreamWriter writer = new OutputStreamWriter(fos);
Gson gson = new Gson();
if(mode == HT_METADATA_MODE) {
gson.toJson(habitTypeMetadata, writer);
} else if (mode == RECENT_HE_MODE){
gson.toJson(recentHabitEvents, writer);
} else if(mode == TODAY_HE_MODE){
gson.toJson(todayHabitEvents, writer);
}
writer.flush();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
throw new RuntimeException();
} catch (IOException e) {
// TODO Auto-generated catch block
throw new RuntimeException();
}
}
public void load(Integer mode) {
// 5. If cases for load function
if(mode == HT_METADATA_MODE){
filename = HT_METADATA_FILE;
} else if(mode == RECENT_HE_MODE){
filename = RECENT_HE_FILE;
} else if(mode == TODAY_HE_MODE){
filename = TODAY_HE_FILE;
}
try {
FileInputStream fis = ctx.openFileInput(filename);
BufferedReader in = new BufferedReader(new InputStreamReader(fis));
Gson gson = new Gson();
//Code taken from http://stackoverflow.com/questions/12384064/gson-convert-from-json-to-a-typed-arraylistt Sept.22,2016
if(mode == HT_METADATA_MODE) {
Type calType = new TypeToken<ArrayList<HabitTypeMetadata>>() {}.getType();
habitTypeMetadata = gson.fromJson(in, calType);
} else if (mode == RECENT_HE_MODE){
Type calType = new TypeToken<ArrayList<HabitEvent>>() {}.getType();
recentHabitEvents = gson.fromJson(in, calType);
} else if (mode == TODAY_HE_MODE){
Type calType = new TypeToken<ArrayList<HabitEvent>>() {}.getType();
todayHabitEvents = gson.fromJson(in, calType);
}
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
if(mode == HT_METADATA_MODE){
habitTypeMetadata = new ArrayList<HabitTypeMetadata>();
} else if(mode == RECENT_HE_MODE){
recentHabitEvents = new ArrayList<HabitEvent>();
} else if(mode == TODAY_HE_MODE){
todayHabitEvents = new ArrayList<HabitEvent>();
}
} catch (IOException e) {
// TODO Auto-generated catch block
throw new RuntimeException();
}
if(mode == HT_METADATA_MODE){
HabitTypeStateManager.getHTStateManager().setHtMetadata(habitTypeMetadata);
} else if(mode == RECENT_HE_MODE){
HabitEventStateManager.getHEStateManager().setRecentHabitEvents(recentHabitEvents);
} else if(mode == TODAY_HE_MODE){
HabitEventStateManager.getHEStateManager().setTodayHabitEvents(todayHabitEvents);
}
}
}
|
fix bug in saving todayhe function
|
HabiTrack/app/src/main/java/com/example/habitrack/FileManager.java
|
fix bug in saving todayhe function
|
|
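The FileManager diff above fixes a missed filename assignment in the TODAY_HE_MODE branch of save(). A common way to make that class of bug harder to reintroduce is to resolve the mode-to-filename mapping in one place and have both save() and load() call it. The sketch below illustrates that idea only; the ModeFilenames class and filenameFor helper are hypothetical and are not part of the HabiTrack code.
// Hypothetical refactoring sketch: one authoritative mode-to-filename mapping.
public class ModeFilenames {
    public static final int HT_METADATA_MODE = 100;
    public static final int RECENT_HE_MODE = 200;
    public static final int TODAY_HE_MODE = 300;
    public static String filenameFor(int mode) {
        switch (mode) {
            case HT_METADATA_MODE: return "htmetadata.sav";
            case RECENT_HE_MODE:   return "recenthabitevents.sav";
            case TODAY_HE_MODE:    return "todayhabitevents.sav";
            default: throw new IllegalArgumentException("Unknown mode: " + mode);
        }
    }
    public static void main(String[] args) {
        // save() and load() would both call the same mapping, so a branch cannot silently
        // fall through with a stale filename, which is exactly the bug the commit above fixes.
        System.out.println(filenameFor(TODAY_HE_MODE)); // todayhabitevents.sav
    }
}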
Java
|
epl-1.0
|
df84eae6270fe49c6d60903eed06b70680c5cc99
| 0
|
asupdev/asup,asupdev/asup,asupdev/asup
|
/**
* Copyright (c) 2012, 2014 Sme.UP and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.asup.os.type.file.impl;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.List;
import org.asup.il.core.QFacet;
import org.asup.il.core.QIntegratedLanguageCorePackage;
import org.asup.il.core.QNamedNode;
import org.asup.il.core.QNode;
import org.asup.os.type.file.QFile;
import org.asup.os.type.file.QOperatingSystemFilePackage;
import org.asup.os.type.impl.TypedObjectImpl;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>QFile</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.asup.os.type.file.impl.FileImpl#getFacets <em>Facets</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public abstract class FileImpl extends TypedObjectImpl implements QFile {
/**
* The cached value of the '{@link #getFacets() <em>Facets</em>}' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getFacets()
* @generated
* @ordered
*/
protected EList<QFacet> facets;
/**
*
*/
private static final long serialVersionUID = 1L;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected FileImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return QOperatingSystemFilePackage.Literals.FILE;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public List<QFacet> getFacets() {
if (facets == null) {
facets = new EObjectContainmentEList<QFacet>(QFacet.class, this, QOperatingSystemFilePackage.FILE__FACETS);
}
return facets;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated NOT
*/
public URI getClassURI() {
try {
// TODO
// eClass().getEPackage().getNsURI();
String classURI = "file/"+ getAttribute().toLowerCase()+"/" + getApplication() + "/" + getName();
URI uri = new URI(classURI);
return uri;
} catch (URISyntaxException e) {
return null;
}
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated NOT
*/
public URI getPackageInfoURI() {
try {
String classURI = "file/"+ getAttribute().toLowerCase()+"/" + getApplication();
URI uri = new URI(classURI);
return uri;
} catch (URISyntaxException e) {
return null;
}
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public <F extends QFacet> F getFacet(Class<F> klass) {
// TODO: implement this method
// Ensure that you remove @generated or mark it @generated NOT
throw new UnsupportedOperationException();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public QNode getParent() {
// TODO: implement this method
// Ensure that you remove @generated or mark it @generated NOT
throw new UnsupportedOperationException();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isChild() {
// TODO: implement this method
// Ensure that you remove @generated or mark it @generated NOT
throw new UnsupportedOperationException();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
return ((InternalEList<?>)getFacets()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
return getFacets();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
getFacets().clear();
getFacets().addAll((Collection<? extends QFacet>)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
getFacets().clear();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
return facets != null && !facets.isEmpty();
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) {
if (baseClass == QNode.class) {
switch (derivedFeatureID) {
case QOperatingSystemFilePackage.FILE__FACETS: return QIntegratedLanguageCorePackage.NODE__FACETS;
default: return -1;
}
}
if (baseClass == QNamedNode.class) {
switch (derivedFeatureID) {
default: return -1;
}
}
return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
if (baseClass == QNode.class) {
switch (baseFeatureID) {
case QIntegratedLanguageCorePackage.NODE__FACETS: return QOperatingSystemFilePackage.FILE__FACETS;
default: return -1;
}
}
if (baseClass == QNamedNode.class) {
switch (baseFeatureID) {
default: return -1;
}
}
return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
}
} //QFileImpl
|
org.asup.os.type.file/src/org/asup/os/type/file/impl/FileImpl.java
|
/**
* Copyright (c) 2012, 2014 Sme.UP and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.asup.os.type.file.impl;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.List;
import org.asup.il.core.QFacet;
import org.asup.il.core.QIntegratedLanguageCorePackage;
import org.asup.il.core.QNamedNode;
import org.asup.il.core.QNode;
import org.asup.os.type.file.QFile;
import org.asup.os.type.file.QOperatingSystemFilePackage;
import org.asup.os.type.impl.TypedObjectImpl;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>QFile</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link org.asup.os.type.file.impl.FileImpl#getFacets <em>Facets</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public abstract class FileImpl extends TypedObjectImpl implements QFile {
/**
* The cached value of the '{@link #getFacets() <em>Facets</em>}' containment reference list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getFacets()
* @generated
* @ordered
*/
protected EList<QFacet> facets;
/**
*
*/
private static final long serialVersionUID = 1L;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected FileImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return QOperatingSystemFilePackage.Literals.FILE;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public List<QFacet> getFacets() {
if (facets == null) {
facets = new EObjectContainmentEList<QFacet>(QFacet.class, this, QOperatingSystemFilePackage.FILE__FACETS);
}
return facets;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated NOT
*/
public URI getClassURI() {
try {
// TODO
// eClass().getEPackage().getNsURI();
String classURI = "file/"+ getAttribute().toLowerCase()+"/" + getApplication() + "/" + getName();
URI uri = new URI(classURI);
return uri;
} catch (URISyntaxException e) {
return null;
}
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated NOT
*/
public URI getPackageInfoURI() {
try {
String classURI = "file/"+ getAttribute().toLowerCase()+"/" + getApplication();
URI uri = new URI(classURI);
return uri;
} catch (URISyntaxException e) {
return null;
}
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public <F extends QFacet> F getFacet(Class<F> klass) {
// TODO: implement this method
// Ensure that you remove @generated or mark it @generated NOT
throw new UnsupportedOperationException();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public QNode getParent() {
// TODO: implement this method
// Ensure that you remove @generated or mark it @generated NOT
throw new UnsupportedOperationException();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public boolean isChild() {
// TODO: implement this method
// Ensure that you remove @generated or mark it @generated NOT
throw new UnsupportedOperationException();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
return ((InternalEList<?>)getFacets()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
return getFacets();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
getFacets().clear();
getFacets().addAll((Collection<? extends QFacet>)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
getFacets().clear();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case QOperatingSystemFilePackage.FILE__FACETS:
return facets != null && !facets.isEmpty();
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public int eBaseStructuralFeatureID(int derivedFeatureID, Class<?> baseClass) {
if (baseClass == QNode.class) {
switch (derivedFeatureID) {
default: return -1;
}
}
if (baseClass == QNamedNode.class) {
switch (derivedFeatureID) {
case QOperatingSystemFilePackage.FILE__FACETS: return QIntegratedLanguageCorePackage.NAMED_NODE__FACETS;
default: return -1;
}
}
return super.eBaseStructuralFeatureID(derivedFeatureID, baseClass);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public int eDerivedStructuralFeatureID(int baseFeatureID, Class<?> baseClass) {
if (baseClass == QNode.class) {
switch (baseFeatureID) {
default: return -1;
}
}
if (baseClass == QNamedNode.class) {
switch (baseFeatureID) {
case QIntegratedLanguageCorePackage.NAMED_NODE__FACETS: return QOperatingSystemFilePackage.FILE__FACETS;
default: return -1;
}
}
return super.eDerivedStructuralFeatureID(baseFeatureID, baseClass);
}
} //QFileImpl
|
Regenerated code from os-type-file.ecore
|
org.asup.os.type.file/src/org/asup/os/type/file/impl/FileImpl.java
|
Regenerated code from os-type-file.ecore
|
|
Java
|
epl-1.0
|
d119f0f277321b2c2cf2986269bd84dca5eee2f5
| 0
|
szeder/cJUnit,szeder/cJUnit-dev
|
/*
* This file is covered by the terms of the Common Public License v1.0.
*
* Copyright (c) SZEDER Gábor
*
* Parts of this software were developed within the JEOPARD research
* project, which received funding from the European Union's Seventh
* Framework Programme under grant agreement No. 216682.
*/
package de.fzi.cjunit.jpf.exceptioninfo;
public class ExceptionInfoDefaultImpl implements ExceptionInfo {
String className;
String message;
ExceptionInfo cause;
StackTraceElementInfo[] stackTrace;
public ExceptionInfoDefaultImpl(String className, String message,
StackTraceElementInfo[] stackTrace,
ExceptionInfo cause) {
this.className = className;
this.message = message;
this.stackTrace = stackTrace;
this.cause = cause;
}
public ExceptionInfoDefaultImpl(Throwable t) {
className = t.getClass().getName();
message = t.getMessage();
if (t.getCause() != null) {
cause = new ExceptionInfoDefaultImpl(t.getCause());
}
StackTraceElement[] origStackTrace = t.getStackTrace();
stackTrace = new StackTraceElementInfo[origStackTrace.length];
for (int i = 0; i < origStackTrace.length; i++) {
stackTrace[i] = new StackTraceElementInfoDefaultImpl(
origStackTrace[i]);
}
}
public ExceptionInfoDefaultImpl(ExceptionInfo other) {
className = other.getClassName();
message = other.getMessage();
if (other.hasCause()) {
cause = new ExceptionInfoDefaultImpl(other.getCause());
}
StackTraceElementInfo[] origStackTrace = other.getStackTrace();
stackTrace = new StackTraceElementInfo[origStackTrace.length];
for (int i = 0; i < origStackTrace.length; i++) {
stackTrace[i] = new StackTraceElementInfoDefaultImpl(
origStackTrace[i]);
}
}
@Override
public String getClassName() {
return className;
}
@Override
public String getMessage() {
return message;
}
@Override
public StackTraceElementInfo[] getStackTrace() {
return stackTrace;
}
@Override
public boolean hasCause() {
return cause != null;
}
@Override
public ExceptionInfo getCause() {
return cause;
}
}
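// Illustrative sketch (hypothetical demo class, relying only on the types defined in this
// package as shown above): the Throwable-based constructor recursively captures the message,
// stack trace and cause chain of a live exception.
class ExceptionInfoWrapDemo {
	public static void main(String[] args) {
		try {
			throw new IllegalStateException("outer failure",
					new NullPointerException("inner failure"));
		} catch (Throwable t) {
			ExceptionInfo info = new ExceptionInfoDefaultImpl(t);
			System.out.println(info.getClassName());          // java.lang.IllegalStateException
			System.out.println(info.hasCause());               // true
			System.out.println(info.getCause().getMessage());  // inner failure
			System.out.println(info.getStackTrace().length);   // number of captured frames
		}
	}
}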
|
src/main/de/fzi/cjunit/jpf/exceptioninfo/ExceptionInfoDefaultImpl.java
|
/*
* This file is covered by the terms of the Common Public License v1.0.
*
* Copyright (c) SZEDER Gábor
*
* Parts of this software were developed within the JEOPARD research
* project, which received funding from the European Union's Seventh
* Framework Programme under grant agreement No. 216682.
*/
package de.fzi.cjunit.jpf.exceptioninfo;
public class ExceptionInfoDefaultImpl implements ExceptionInfo {
String className;
String message;
ExceptionInfo cause;
StackTraceElementInfo[] stackTrace;
public ExceptionInfoDefaultImpl(Throwable t) {
className = t.getClass().getName();
message = t.getMessage();
if (t.getCause() != null) {
cause = new ExceptionInfoDefaultImpl(t.getCause());
}
StackTraceElement[] origStackTrace = t.getStackTrace();
stackTrace = new StackTraceElementInfo[origStackTrace.length];
for (int i = 0; i < origStackTrace.length; i++) {
stackTrace[i] = new StackTraceElementInfoDefaultImpl(
origStackTrace[i]);
}
}
public ExceptionInfoDefaultImpl(ExceptionInfo other) {
className = other.getClassName();
message = other.getMessage();
if (other.hasCause()) {
cause = new ExceptionInfoDefaultImpl(other.getCause());
}
StackTraceElementInfo[] origStackTrace = other.getStackTrace();
stackTrace = new StackTraceElementInfo[origStackTrace.length];
for (int i = 0; i < origStackTrace.length; i++) {
stackTrace[i] = new StackTraceElementInfoDefaultImpl(
origStackTrace[i]);
}
}
@Override
public String getClassName() {
return className;
}
@Override
public String getMessage() {
return message;
}
@Override
public StackTraceElementInfo[] getStackTrace() {
return stackTrace;
}
@Override
public boolean hasCause() {
return cause != null;
}
@Override
public ExceptionInfo getCause() {
return cause;
}
}
|
ExceptionInfoDefaultImpl: add alternative constructor
... to be able to instantiate based on a given class name, message,
stack trace and cause.
Signed-off-by: SZEDER Gábor <46692b1f0516358b02a6405518c0a52aa8fb3e94@fzi.de>
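A minimal sketch of the new field-based constructor in use; the demo class, the com.example names and the literal values below are hypothetical, while the constructor signature, the ExceptionInfo accessors and StackTraceElementInfoDefaultImpl come from the file above.
package de.fzi.cjunit.jpf.exceptioninfo;
// Hypothetical demo: builds an ExceptionInfo from explicit fields instead of wrapping a Throwable.
public class ExceptionInfoFieldConstructorDemo {
	public static void main(String[] args) {
		StackTraceElementInfo[] trace = new StackTraceElementInfo[] {
			new StackTraceElementInfoDefaultImpl(
					new StackTraceElement("com.example.Worker", "run", "Worker.java", 42))
		};
		ExceptionInfo cause = new ExceptionInfoDefaultImpl(new NullPointerException("missing resource"));
		ExceptionInfo info = new ExceptionInfoDefaultImpl(
				"java.lang.IllegalStateException",  // class name
				"wrapper failure",                  // message
				trace,                              // stack trace
				cause);                             // cause
		System.out.println(info.getClassName() + ": " + info.getMessage()
				+ ", caused by " + info.getCause().getClassName());
	}
}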
|
src/main/de/fzi/cjunit/jpf/exceptioninfo/ExceptionInfoDefaultImpl.java
|
ExceptionInfoDefaultImpl: add alternative constructor
|
|
Java
|
mpl-2.0
|
056f88a2bd9381beb55b0a804e3d89ec3e97f5a4
| 0
|
kigsmtua/openmrs-core,lbl52001/openmrs-core,Winbobob/openmrs-core,WANeves/openmrs-core,foolchan2556/openmrs-core,sintjuri/openmrs-core,maekstr/openmrs-core,dlahn/openmrs-core,pselle/openmrs-core,MuhammadSafwan/Stop-Button-Ability,dlahn/openmrs-core,kckc/openmrs-core,milankarunarathne/openmrs-core,milankarunarathne/openmrs-core,foolchan2556/openmrs-core,sintjuri/openmrs-core,maany/openmrs-core,koskedk/openmrs-core,ern2/openmrs-core,maany/openmrs-core,kristopherschmidt/openmrs-core,kristopherschmidt/openmrs-core,ssmusoke/openmrs-core,donaldgavis/openmrs-core,michaelhofer/openmrs-core,aj-jaswanth/openmrs-core,maekstr/openmrs-core,trsorsimoII/openmrs-core,macorrales/openmrs-core,naraink/openmrs-core,asifur77/openmrs,prisamuel/openmrs-core,kabariyamilind/openMRSDEV,sintjuri/openmrs-core,WANeves/openmrs-core,sadhanvejella/openmrs,iLoop2/openmrs-core,donaldgavis/openmrs-core,naraink/openmrs-core,donaldgavis/openmrs-core,kristopherschmidt/openmrs-core,nilusi/Legacy-UI,alexei-grigoriev/openmrs-core,joansmith/openmrs-core,lilo2k/openmrs-core,sadhanvejella/openmrs,vinayvenu/openmrs-core,preethi29/openmrs-core,hoquangtruong/TestMylyn,macorrales/openmrs-core,alexwind26/openmrs-core,spereverziev/openmrs-core,ldf92/openmrs-core,jvena1/openmrs-core,trsorsimoII/openmrs-core,joansmith/openmrs-core,joansmith/openmrs-core,macorrales/openmrs-core,foolchan2556/openmrs-core,andyvand/OpenMRS,michaelhofer/openmrs-core,iLoop2/openmrs-core,kristopherschmidt/openmrs-core,andyvand/OpenMRS,donaldgavis/openmrs-core,ldf92/openmrs-core,chethandeshpande/openmrs-core,AbhijitParate/openmrs-core,jcantu1988/openmrs-core,lilo2k/openmrs-core,sadhanvejella/openmrs,shiangree/openmrs-core,siddharthkhabia/openmrs-core,AbhijitParate/openmrs-core,lbl52001/openmrs-core,jcantu1988/openmrs-core,jamesfeshner/openmrs-module,pselle/openmrs-core,dcmul/openmrs-core,foolchan2556/openmrs-core,koskedk/openmrs-core,Bhamni/openmrs-core,kabariyamilind/openMRSDEV,ssmusoke/openmrs-core,lilo2k/openmrs-core,geoff-wasilwa/openmrs-core,rbtracker/openmrs-core,kigsmtua/openmrs-core,andyvand/OpenMRS,hoquangtruong/TestMylyn,naraink/openmrs-core,aboutdata/openmrs-core,alexei-grigoriev/openmrs-core,jembi/openmrs-core,asifur77/openmrs,Ch3ck/openmrs-core,maekstr/openmrs-core,AbhijitParate/openmrs-core,MitchellBot/openmrs-core,alexei-grigoriev/openmrs-core,koskedk/openmrs-core,siddharthkhabia/openmrs-core,maekstr/openmrs-core,joansmith/openmrs-core,aboutdata/openmrs-core,kigsmtua/openmrs-core,rbtracker/openmrs-core,kigsmtua/openmrs-core,MitchellBot/openmrs-core,milankarunarathne/openmrs-core,kigsmtua/openmrs-core,hoquangtruong/TestMylyn,vinayvenu/openmrs-core,lbl52001/openmrs-core,dlahn/openmrs-core,WANeves/openmrs-core,Bhamni/openmrs-core,jamesfeshner/openmrs-module,kabariyamilind/openMRSDEV,koskedk/openmrs-core,alexei-grigoriev/openmrs-core,kabariyamilind/openMRSDEV,trsorsimoII/openmrs-core,dcmul/openmrs-core,jvena1/openmrs-core,hoquangtruong/TestMylyn,jembi/openmrs-core,preethi29/openmrs-core,Negatu/openmrs-core,dcmul/openmrs-core,ern2/openmrs-core,Bhamni/openmrs-core,ern2/openmrs-core,Openmrs-joel/openmrs-core,Negatu/openmrs-core,milankarunarathne/openmrs-core,ssmusoke/openmrs-core,lbl52001/openmrs-core,pselle/openmrs-core,siddharthkhabia/openmrs-core,rbtracker/openmrs-core,preethi29/openmrs-core,jamesfeshner/openmrs-module,foolchan2556/openmrs-core,AbhijitParate/openmrs-core,kabariyamilind/openMRSDEV,rbtracker/openmrs-core,jvena1/openmrs-core,shiangree/openmrs-core,spereverziev/openmrs-core,nilusi/Legacy-UI,maany/openmrs-core,Ch3ck/openmrs-core,spereverziev/openmrs-core,prisamuel/openmrs-core,milankarunarathne/openmrs-core,chethandeshpande/openmrs-core,sadhanvejella/openmrs,MuhammadSafwan/Stop-Button-Ability,Winbobob/openmrs-core,shiangree/openmrs-core,michaelhofer/openmrs-core,kckc/openmrs-core,hoquangtruong/TestMylyn,jembi/openmrs-core,nilusi/Legacy-UI,kristopherschmidt/openmrs-core,maekstr/openmrs-core,ern2/openmrs-core,jcantu1988/openmrs-core,dlahn/openmrs-core,prisamuel/openmrs-core,geoff-wasilwa/openmrs-core,asifur77/openmrs,Bhamni/openmrs-core,andyvand/OpenMRS,Winbobob/openmrs-core,AbhijitParate/openmrs-core,trsorsimoII/openmrs-core,Ch3ck/openmrs-core,alexwind26/openmrs-core,ldf92/openmrs-core,ssmusoke/openmrs-core,geoff-wasilwa/openmrs-core,Openmrs-joel/openmrs-core,sravanthi17/openmrs-core,jvena1/openmrs-core,nilusi/Legacy-UI,maekstr/openmrs-core,jcantu1988/openmrs-core,Openmrs-joel/openmrs-core,dcmul/openmrs-core,kckc/openmrs-core,MuhammadSafwan/Stop-Button-Ability,dcmul/openmrs-core,aboutdata/openmrs-core,ern2/openmrs-core,aboutdata/openmrs-core,andyvand/OpenMRS,koskedk/openmrs-core,alexwind26/openmrs-core,vinayvenu/openmrs-core,kckc/openmrs-core,aj-jaswanth/openmrs-core,prisamuel/openmrs-core,koskedk/openmrs-core,spereverziev/openmrs-core,alexwind26/openmrs-core,lilo2k/openmrs-core,Negatu/openmrs-core,sadhanvejella/openmrs,dcmul/openmrs-core,pselle/openmrs-core,lbl52001/openmrs-core,kigsmtua/openmrs-core,Bhamni/openmrs-core,nilusi/Legacy-UI,Winbobob/openmrs-core,WANeves/openmrs-core,aj-jaswanth/openmrs-core,ldf92/openmrs-core,iLoop2/openmrs-core,shiangree/openmrs-core,Winbobob/openmrs-core,WANeves/openmrs-core,asifur77/openmrs,foolchan2556/openmrs-core,jembi/openmrs-core,Winbobob/openmrs-core,spereverziev/openmrs-core,iLoop2/openmrs-core,andyvand/OpenMRS,michaelhofer/openmrs-core,aboutdata/openmrs-core,MitchellBot/openmrs-core,siddharthkhabia/openmrs-core,Ch3ck/openmrs-core,alexei-grigoriev/openmrs-core,shiangree/openmrs-core,prisamuel/openmrs-core,geoff-wasilwa/openmrs-core,maany/openmrs-core,chethandeshpande/openmrs-core,prisamuel/openmrs-core,pselle/openmrs-core,sravanthi17/openmrs-core,Negatu/openmrs-core,sravanthi17/openmrs-core,lbl52001/openmrs-core,trsorsimoII/openmrs-core,maany/openmrs-core,jamesfeshner/openmrs-module,MuhammadSafwan/Stop-Button-Ability,AbhijitParate/openmrs-core,preethi29/openmrs-core,MuhammadSafwan/Stop-Button-Ability,vinayvenu/openmrs-core,sintjuri/openmrs-core,macorrales/openmrs-core,kckc/openmrs-core,naraink/openmrs-core,jembi/openmrs-core,Openmrs-joel/openmrs-core,iLoop2/openmrs-core,asifur77/openmrs,MitchellBot/openmrs-core,rbtracker/openmrs-core,donaldgavis/openmrs-core,MuhammadSafwan/Stop-Button-Ability,dlahn/openmrs-core,alexei-grigoriev/openmrs-core,sravanthi17/openmrs-core,vinayvenu/openmrs-core,jembi/openmrs-core,kckc/openmrs-core,michaelhofer/openmrs-core,Negatu/openmrs-core,aj-jaswanth/openmrs-core,sadhanvejella/openmrs,nilusi/Legacy-UI,geoff-wasilwa/openmrs-core,chethandeshpande/openmrs-core,lilo2k/openmrs-core,pselle/openmrs-core,chethandeshpande/openmrs-core,jcantu1988/openmrs-core,Openmrs-joel/openmrs-core,siddharthkhabia/openmrs-core,aj-jaswanth/openmrs-core,milankarunarathne/openmrs-core,sravanthi17/openmrs-core,shiangree/openmrs-core,jvena1/openmrs-core,joansmith/openmrs-core,Ch3ck/openmrs-core,naraink/openmrs-core,sintjuri/openmrs-core,sintjuri/openmrs-core,ssmusoke/openmrs-core,alexwind26/openmrs-core,hoquangtruong/TestMylyn,preethi29/openmrs-core,WANeves/openmrs-core,siddharthkhabia/openmrs-core,ldf92/openmrs-core,aboutdata/openmrs-core,jamesfeshner/openmrs-module,macorrales/openmrs-core,spereverziev/openmrs-core,iLoop2/openmrs-core,Negatu/openmrs-core,naraink/openmrs-core,lilo2k/openmrs-core,MitchellBot/openmrs-core
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openmrs.annotation.DisableHandlers;
import org.openmrs.api.context.Context;
import org.openmrs.api.handler.VoidHandler;
/**
* An Encounter represents one visit or interaction of a patient with a healthcare worker. Every
 * encounter can have 0 to n Observations associated with it. Every encounter can have 0 to n Orders
 * associated with it. The patientId attribute should be equal to patient.patientId and is only
* included this second time for performance increases on bulk calls.
*
* @see Obs
* @see Order
*/
public class Encounter extends BaseOpenmrsData implements java.io.Serializable {
public static final long serialVersionUID = 2L;
// Fields
private Integer encounterId;
private Date encounterDatetime;
private Patient patient;
private Integer patientId;
private Location location;
private Form form;
private EncounterType encounterType;
private Set<Order> orders;
private Set<Obs> obs;
private Visit visit;
@DisableHandlers(handlerTypes = { VoidHandler.class })
private Set<EncounterProvider> encounterProviders = new LinkedHashSet<EncounterProvider>();
// Constructors
/** default constructor */
public Encounter() {
}
/**
* @param encounterId
* @should set encounter id
*/
public Encounter(Integer encounterId) {
this.encounterId = encounterId;
}
// Property accessors
/**
* @return Returns the encounterDatetime.
*/
public Date getEncounterDatetime() {
return encounterDatetime;
}
/**
* @param encounterDatetime The encounterDatetime to set.
*/
public void setEncounterDatetime(Date encounterDatetime) {
this.encounterDatetime = encounterDatetime;
}
/**
* @return Returns the encounterId.
*/
public Integer getEncounterId() {
return encounterId;
}
/**
* @param encounterId The encounterId to set.
*/
public void setEncounterId(Integer encounterId) {
this.encounterId = encounterId;
}
/**
* @return Returns the encounterType.
*/
public EncounterType getEncounterType() {
return encounterType;
}
/**
* @param encounterType The encounterType to set.
*/
public void setEncounterType(EncounterType encounterType) {
this.encounterType = encounterType;
}
/**
* @return Returns the location.
*/
public Location getLocation() {
return location;
}
/**
* @param location The location to set.
*/
public void setLocation(Location location) {
this.location = location;
}
/**
* @return Returns a Set<Obs> of all non-voided, non-obsGroup children Obs of this Encounter
* @should not return null with null obs set
* @should get obs
* @should not get voided obs
* @should only get child obs
* @should not get child obs if child also on encounter
* @should get both child and parent obs after removing child from parent grouping
* @should get obs with two levels of hierarchy
* @should get obs with three levels of hierarchy
* @should not get voided obs with three layers of hierarchy
*/
public Set<Obs> getObs() {
Set<Obs> ret = new HashSet<Obs>();
if (this.obs != null) {
for (Obs o : this.obs)
ret.addAll(getObsLeaves(o));
// this should be all that's needed unless the encounter has been built by hand
//if (o.isVoided() == false && o.isObsGrouping() == false)
// ret.add(o);
}
return ret;
}
/**
* Convenience method to recursively get all leaf obs of this encounter. This method goes down
* into each obs and adds all non-grouping obs to the return list
*
* @param obsParent current obs to loop over
* @return list of leaf obs
*/
private List<Obs> getObsLeaves(Obs obsParent) {
List<Obs> leaves = new ArrayList<Obs>();
if (obsParent.hasGroupMembers()) {
for (Obs child : obsParent.getGroupMembers()) {
if (child.isVoided() == false) {
if (child.isObsGrouping() == false)
leaves.add(child);
else
// recurse if this is a grouping obs
leaves.addAll(getObsLeaves(child));
}
}
} else if (obsParent.isVoided() == false) {
leaves.add(obsParent);
}
return leaves;
}
/**
 * Returns all Obs where Obs.encounterId = Encounter.encounterId. In practice, this method should
* not be used very often...
*
* @param includeVoided specifies whether or not to include voided Obs
* @return Returns the all Obs.
* @should not return null with null obs set
* @should get obs
* @should get both parent and child obs
* @should get both parent and child with child directly on encounter
* @should get both child and parent obs after removing child from parent grouping
*/
public Set<Obs> getAllObs(boolean includeVoided) {
if (includeVoided && obs != null)
return obs;
Set<Obs> ret = new HashSet<Obs>();
if (this.obs != null) {
for (Obs o : this.obs) {
if (includeVoided)
ret.add(o);
else if (!o.isVoided())
ret.add(o);
}
}
return ret;
}
/**
* Convenience method to call {@link #getAllObs(boolean)} with a false parameter
*
* @return all non-voided obs
* @should not get voided obs
*/
public Set<Obs> getAllObs() {
return getAllObs(false);
}
/**
* Returns a Set<Obs> of all root-level Obs of an Encounter, including obsGroups
*
* @param includeVoided specifies whether or not to include voided Obs
* @return Returns all obs at top level -- will not be null
* @should not return null with null obs set
* @should get obs
* @should not get voided obs
* @should only get parents obs
* @should only return the grouped top level obs
* @should get both child and parent obs after removing child from parent grouping
*/
public Set<Obs> getObsAtTopLevel(boolean includeVoided) {
Set<Obs> ret = new HashSet<Obs>();
for (Obs o : getAllObs(includeVoided)) {
if (o.getObsGroup() == null)
ret.add(o);
}
return ret;
}
/**
* @param obs The obs to set.
*/
public void setObs(Set<Obs> obs) {
this.obs = obs;
}
/**
* Add the given Obs to the list of obs for this Encounter.
*
* @param observation the Obs to add to this encounter
* @should add obs with null values
* @should not fail with null obs
* @should set encounter attribute on obs
* @should add obs to non null initial obs set
* @should add encounter attrs to obs if attributes are null
*/
public void addObs(Obs observation) {
if (obs == null)
obs = new HashSet<Obs>();
if (observation != null) {
observation.setEncounter(this);
if (observation.getObsDatetime() == null)
observation.setObsDatetime(getEncounterDatetime());
if (observation.getPerson() == null)
observation.setPerson(getPatient());
if (observation.getLocation() == null)
observation.setLocation(getLocation());
obs.add(observation);
}
}
/**
* Remove the given observation from the list of obs for this Encounter
*
* @param observation
* @should remove obs successfully
* @should not throw error when removing null obs from empty set
* @should not throw error when removing null obs from non empty set
*/
public void removeObs(Obs observation) {
if (obs != null)
obs.remove(observation);
}
/**
* @return Returns the orders
*/
public Set<Order> getOrders() {
if (orders == null) {
return new HashSet<Order>();
}
return orders;
}
/**
* @param orders The orders to set.
*/
public void setOrders(Set<Order> orders) {
this.orders = orders;
}
/**
* Add the given Order to the list of orders for this Encounter
*
* @param order
* @should add order with null values
* @should not fail with null obs passed to add order
* @should set encounter attribute
 * @should add order to non null initial order set
*/
public void addOrder(Order order) {
if (orders == null)
orders = new HashSet<Order>();
if (order != null) {
order.setEncounter(this);
orders.add(order);
}
}
/**
* Remove the given observation from the list of orders for this Encounter
*
* @param order
* @should remove order from encounter
* @should not fail when removing null order
* @should not fail when removing non existent order
*/
public void removeOrder(Order order) {
if (orders != null)
orders.remove(order);
}
/**
* @return Returns the patient.
*/
public Patient getPatient() {
return patient;
}
/**
* @param patient The patient to set.
*/
public void setPatient(Patient patient) {
this.patient = patient;
}
/**
* @return the patientId
*/
public Integer getPatientId() {
return patientId;
}
/**
* @param patientId the patientId to set
*/
public void setPatientId(Integer patientId) {
this.patientId = patientId;
}
/**
* Basic property accessor for encounterProviders. The convenience methods getProvidersByRoles
* and getProvidersByRole are the preferred methods for getting providers. This getter is
* provided as a convenience for treating this like a DTO
*
* @return list of all existing providers on this encounter
* @see #getProvidersByRole(EncounterRole)
* @see #getProvidersByRoles()
* @since 1.9.1
*/
public Set<EncounterProvider> getEncounterProviders() {
return encounterProviders;
}
/**
* Basic property setter for encounterProviders. The convenience methods addProvider,
* removeProvider, and setProvider are the preferred methods for adding/removing providers. This
* setter is provided as a convenience for treating this like a DTO
*
* @param encounterProviders the list of EncounterProvider objects to set. Overwrites list as
* normal setter is inclined to do
* @see #addProvider(EncounterRole, Provider)
* @see #removeProvider(EncounterRole, Provider)
* @see #setProvider(EncounterRole, Provider)
* @since 1.9.1
*/
public void setEncounterProviders(Set<EncounterProvider> encounterProviders) {
this.encounterProviders = encounterProviders;
}
/**
* @return Returns the provider.
* @since 1.6 (used to return User)
* @deprecated since 1.9, use {@link #getProvidersByRole(EncounterRole)}
* @should return null if there is no providers
* @should return provider for person
* @should return null if there is no provider for person
* @should return same provider for person if called twice
*/
public Person getProvider() {
if (encounterProviders == null || encounterProviders.isEmpty()) {
return null;
} else {
for (EncounterProvider encounterProvider : encounterProviders) {
//Return the first person in the list
if (encounterProvider.getProvider().getPerson() != null) {
return encounterProvider.getProvider().getPerson();
}
}
}
return null;
}
/**
* @param provider The provider to set.
* @deprecated use {@link #setProvider(Person)}
*/
public void setProvider(User provider) {
setProvider(provider.getPerson());
}
/**
* @param provider The provider to set.
* @deprecated since 1.9, use {@link #setProvider(EncounterRole, Provider)}
* @should set existing provider for unknown role
*/
public void setProvider(Person provider) {
EncounterRole unknownRole = Context.getEncounterService().getEncounterRoleByUuid(
EncounterRole.UNKNOWN_ENCOUNTER_ROLE_UUID);
if (unknownRole == null) {
throw new IllegalStateException("No 'Unknown' encounter role with uuid "
+ EncounterRole.UNKNOWN_ENCOUNTER_ROLE_UUID + ".");
}
Collection<Provider> providers = Context.getProviderService().getProvidersByPerson(provider);
if (providers == null || providers.isEmpty()) {
throw new IllegalArgumentException("No provider with personId " + provider.getPersonId());
}
setProvider(unknownRole, providers.iterator().next());
}
/**
* @return Returns the form.
*/
public Form getForm() {
return form;
}
/**
* @param form The form to set.
*/
public void setForm(Form form) {
this.form = form;
}
/**
* @see java.lang.Object#toString()
* @should not fail with empty object
*/
@Override
public String toString() {
String ret = "";
ret += encounterId == null ? "(no ID) " : encounterId.toString() + " ";
ret += this.getEncounterDatetime() == null ? "(no Date) " : this.getEncounterDatetime().toString() + " ";
ret += this.getEncounterType() == null ? "(no Type) " : this.getEncounterType().getName() + " ";
ret += this.getLocation() == null ? "(no Location) " : this.getLocation().getName() + " ";
ret += this.getPatient() == null ? "(no Patient) " : this.getPatient().getPatientId().toString() + " ";
ret += this.getForm() == null ? "(no Form) " : this.getForm().getName() + " ";
ret += this.getObsAtTopLevel(false) == null ? "(no Obs) " : "num Obs: " + this.getObsAtTopLevel(false).size() + " ";
ret += this.getOrders() == null ? "(no Orders) " : "num Orders: " + this.getOrders().size() + " ";
return "Encounter: [" + ret + "]";
}
/**
* @since 1.5
* @see org.openmrs.OpenmrsObject#getId()
*/
public Integer getId() {
return getEncounterId();
}
/**
* @since 1.5
* @see org.openmrs.OpenmrsObject#setId(java.lang.Integer)
*/
public void setId(Integer id) {
setEncounterId(id);
}
/**
* Gets the visit.
*
* @return the visit.
* @since 1.9
*/
public Visit getVisit() {
return visit;
}
/**
* Sets the visit
*
* @param visit the visit to set.
* @since 1.9
*/
public void setVisit(Visit visit) {
this.visit = visit;
}
/**
* Gets all unvoided providers, grouped by role.
*
* @return map of unvoided providers keyed by roles
* @since 1.9
* @should return empty map if no unvoided providers
* @should return all roles and unvoided providers
*/
public Map<EncounterRole, Set<Provider>> getProvidersByRoles() {
return getProvidersByRoles(false);
}
/**
* Gets all providers, grouped by role.
*
* @param includeVoided set to true to include voided providers, else set to false
* @return map of providers keyed by roles
* @since 1.9
* @should return empty map if no providers
* @should return all roles and providers
*/
public Map<EncounterRole, Set<Provider>> getProvidersByRoles(boolean includeVoided) {
Map<EncounterRole, Set<Provider>> providers = new HashMap<EncounterRole, Set<Provider>>();
for (EncounterProvider encounterProvider : encounterProviders) {
if (!includeVoided && encounterProvider.getVoided()) {
continue;
}
Set<Provider> list = providers.get(encounterProvider.getEncounterRole());
if (list == null) {
list = new LinkedHashSet<Provider>();
providers.put(encounterProvider.getEncounterRole(), list);
}
list.add(encounterProvider.getProvider());
}
return providers;
}
/**
* Gets unvoided providers who had the given role in this encounter.
*
* @param role
* @return unvoided providers or empty set if none was found
* @since 1.9
* @should return unvoided providers for role
* @should return empty set for no role
* @should return empty set for null role
*/
public Set<Provider> getProvidersByRole(EncounterRole role) {
return getProvidersByRole(role, false);
}
/**
* Gets providers who had the given role in this encounter.
*
* @param role
* @param includeVoided set to true to include voided providers, else set to false
* @return providers or empty set if none was found
* @since 1.9
* @should return providers for role
* @should return empty set for no role
* @should return empty set for null role
*/
public Set<Provider> getProvidersByRole(EncounterRole role, boolean includeVoided) {
Set<Provider> providers = new LinkedHashSet<Provider>();
for (EncounterProvider encounterProvider : encounterProviders) {
if (encounterProvider.getEncounterRole().equals(role)) {
if (!includeVoided && encounterProvider.getVoided()) {
continue;
}
providers.add(encounterProvider.getProvider());
}
}
return providers;
}
/**
* Adds a new provider for the encounter, with the given role.
*
* @param role
* @param provider
* @since 1.9
* @should add provider for new role
* @should add second provider for role
* @should not add same provider twice for role
*/
public void addProvider(EncounterRole role, Provider provider) {
// first, make sure the provider isn't already there
for (EncounterProvider ep : encounterProviders) {
if (ep.getEncounterRole().equals(role) && ep.getProvider().equals(provider))
return;
}
EncounterProvider encounterProvider = new EncounterProvider();
encounterProvider.setEncounter(this);
encounterProvider.setEncounterRole(role);
encounterProvider.setProvider(provider);
encounterProvider.setDateCreated(new Date());
encounterProvider.setCreator(Context.getAuthenticatedUser());
encounterProviders.add(encounterProvider);
}
/**
* Sets the provider for the given role.
* <p>
* If the encounter already had any providers for the given role, those are removed.
*
* @param role
* @param provider
* @since 1.9
* @should set provider for new role
* @should clear providers and set provider for role
* @should void existing EncounterProvider
*/
public void setProvider(EncounterRole role, Provider provider) {
boolean hasProvider = false;
for (Iterator<EncounterProvider> it = encounterProviders.iterator(); it.hasNext();) {
EncounterProvider encounterProvider = it.next();
if (encounterProvider.getEncounterRole().equals(role)) {
if (!encounterProvider.getProvider().equals(provider)) {
encounterProvider.setVoided(true);
encounterProvider.setDateVoided(new Date());
encounterProvider.setVoidedBy(Context.getAuthenticatedUser());
} else {
hasProvider = true;
}
}
}
if (!hasProvider) {
addProvider(role, provider);
}
}
/**
* Removes the provider for a given role.
*
* @param role the role.
* @param provider the provider.
* @since 1.9
* @should void existing EncounterProvider
*/
public void removeProvider(EncounterRole role, Provider provider) {
for (EncounterProvider encounterProvider : encounterProviders) {
if (encounterProvider.getEncounterRole().equals(role) && encounterProvider.getProvider().equals(provider)) {
encounterProvider.setVoided(true);
encounterProvider.setDateVoided(new Date());
encounterProvider.setVoidedBy(Context.getAuthenticatedUser());
return;
}
}
}
}
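// Illustrative sketch (hypothetical demo class): exercises the obs and provider accessors
// defined above without a running OpenMRS context; addProvider/setProvider are avoided here
// because they go through Context, and the org.openmrs types used are assumed to carry their
// usual no-argument constructors and default non-voided state.
class EncounterUsageSketch {
	public static void main(String[] args) {
		Encounter encounter = new Encounter(1);
		encounter.setEncounterDatetime(new Date());
		// addObs backfills the encounter datetime onto an obs that has none
		Obs weight = new Obs();
		encounter.addObs(weight);
		System.out.println(encounter.getObs().size());        // 1
		System.out.println(weight.getObsDatetime() != null);  // true
		// DTO-style provider wiring via setEncounterProviders, then read back grouped by role
		EncounterRole role = new EncounterRole();
		Provider provider = new Provider();
		EncounterProvider ep = new EncounterProvider();
		ep.setEncounter(encounter);
		ep.setEncounterRole(role);
		ep.setProvider(provider);
		Set<EncounterProvider> eps = new LinkedHashSet<EncounterProvider>();
		eps.add(ep);
		encounter.setEncounterProviders(eps);
		System.out.println(encounter.getProvidersByRole(role).size());  // 1
	}
}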
|
api/src/main/java/org/openmrs/Encounter.java
|
/**
* The contents of this file are subject to the OpenMRS Public License
* Version 1.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://license.openmrs.org
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations
* under the License.
*
* Copyright (C) OpenMRS, LLC. All Rights Reserved.
*/
package org.openmrs;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.openmrs.annotation.DisableHandlers;
import org.openmrs.api.context.Context;
import org.openmrs.api.handler.VoidHandler;
/**
* An Encounter represents one visit or interaction of a patient with a healthcare worker. Every
 * encounter can have 0 to n Observations associated with it. Every encounter can have 0 to n Orders
 * associated with it. The patientId attribute should be equal to patient.patientId and is only
* included this second time for performance increases on bulk calls.
*
* @see Obs
* @see Order
*/
public class Encounter extends BaseOpenmrsData implements java.io.Serializable {
public static final long serialVersionUID = 2L;
// Fields
private Integer encounterId;
private Date encounterDatetime;
private Patient patient;
private Integer patientId;
private Location location;
private Form form;
private EncounterType encounterType;
private Set<Order> orders;
private Set<Obs> obs;
private Visit visit;
@DisableHandlers(handlerTypes = { VoidHandler.class })
private Set<EncounterProvider> encounterProviders = new LinkedHashSet<EncounterProvider>();
// Constructors
/** default constructor */
public Encounter() {
}
/**
* @param encounterId
* @should set encounter id
*/
public Encounter(Integer encounterId) {
this.encounterId = encounterId;
}
// Property accessors
/**
* @return Returns the encounterDatetime.
*/
public Date getEncounterDatetime() {
return encounterDatetime;
}
/**
* @param encounterDatetime The encounterDatetime to set.
*/
public void setEncounterDatetime(Date encounterDatetime) {
this.encounterDatetime = encounterDatetime;
}
/**
* @return Returns the encounterId.
*/
public Integer getEncounterId() {
return encounterId;
}
/**
* @param encounterId The encounterId to set.
*/
public void setEncounterId(Integer encounterId) {
this.encounterId = encounterId;
}
/**
* @return Returns the encounterType.
*/
public EncounterType getEncounterType() {
return encounterType;
}
/**
* @param encounterType The encounterType to set.
*/
public void setEncounterType(EncounterType encounterType) {
this.encounterType = encounterType;
}
/**
* @return Returns the location.
*/
public Location getLocation() {
return location;
}
/**
* @param location The location to set.
*/
public void setLocation(Location location) {
this.location = location;
}
/**
* @return Returns a Set<Obs> of all non-voided, non-obsGroup children Obs of this Encounter
* @should not return null with null obs set
* @should get obs
* @should not get voided obs
* @should only get child obs
* @should not get child obs if child also on encounter
* @should get both child and parent obs after removing child from parent grouping
* @should get obs with two levels of hierarchy
* @should get obs with three levels of hierarchy
* @should not get voided obs with three layers of hierarchy
*/
public Set<Obs> getObs() {
Set<Obs> ret = new HashSet<Obs>();
if (this.obs != null) {
for (Obs o : this.obs)
ret.addAll(getObsLeaves(o));
// this should be all that's needed unless the encounter has been built by hand
//if (o.isVoided() == false && o.isObsGrouping() == false)
// ret.add(o);
}
return ret;
}
/**
* Convenience method to recursively get all leaf obs of this encounter. This method goes down
* into each obs and adds all non-grouping obs to the return list
*
* @param obsParent current obs to loop over
* @return list of leaf obs
*/
private List<Obs> getObsLeaves(Obs obsParent) {
List<Obs> leaves = new ArrayList<Obs>();
if (obsParent.hasGroupMembers()) {
for (Obs child : obsParent.getGroupMembers()) {
if (child.isVoided() == false) {
if (child.isObsGrouping() == false)
leaves.add(child);
else
// recurse if this is a grouping obs
leaves.addAll(getObsLeaves(child));
}
}
} else if (obsParent.isVoided() == false) {
leaves.add(obsParent);
}
return leaves;
}
/**
 * Returns all Obs where Obs.encounterId = Encounter.encounterId. In practice, this method should
* not be used very often...
*
* @param includeVoided specifies whether or not to include voided Obs
* @return Returns the all Obs.
* @should not return null with null obs set
* @should get obs
* @should get both parent and child obs
* @should get both parent and child with child directly on encounter
* @should get both child and parent obs after removing child from parent grouping
*/
public Set<Obs> getAllObs(boolean includeVoided) {
if (includeVoided && obs != null)
return obs;
Set<Obs> ret = new HashSet<Obs>();
if (this.obs != null) {
for (Obs o : this.obs) {
if (includeVoided)
ret.add(o);
else if (!o.isVoided())
ret.add(o);
}
}
return ret;
}
/**
* Convenience method to call {@link #getAllObs(boolean)} with a false parameter
*
* @return all non-voided obs
* @should not get voided obs
*/
public Set<Obs> getAllObs() {
return getAllObs(false);
}
/**
* Returns a Set<Obs> of all root-level Obs of an Encounter, including obsGroups
*
* @param includeVoided specifies whether or not to include voided Obs
* @return Returns all obs at top level -- will not be null
* @should not return null with null obs set
* @should get obs
* @should not get voided obs
* @should only get parents obs
* @should only return the grouped top level obs
* @should get both child and parent obs after removing child from parent grouping
*/
public Set<Obs> getObsAtTopLevel(boolean includeVoided) {
Set<Obs> ret = new HashSet<Obs>();
for (Obs o : getAllObs(includeVoided)) {
if (o.getObsGroup() == null)
ret.add(o);
}
return ret;
}
/**
* @param obs The obs to set.
*/
public void setObs(Set<Obs> obs) {
this.obs = obs;
}
/**
* Add the given Obs to the list of obs for this Encounter.
*
* @param observation the Obs to add to this encounter
* @should add obs with null values
* @should not fail with null obs
* @should set encounter attribute on obs
* @should add obs to non null initial obs set
* @should add encounter attrs to obs if attributes are null
*/
public void addObs(Obs observation) {
if (obs == null)
obs = new HashSet<Obs>();
if (observation != null) {
observation.setEncounter(this);
if (observation.getObsDatetime() == null)
observation.setObsDatetime(getEncounterDatetime());
if (observation.getPerson() == null)
observation.setPerson(getPatient());
if (observation.getLocation() == null)
observation.setLocation(getLocation());
obs.add(observation);
}
}
/**
* Remove the given observation from the list of obs for this Encounter
*
* @param observation
* @should remove obs successfully
* @should not throw error when removing null obs from empty set
* @should not throw error when removing null obs from non empty set
*/
public void removeObs(Obs observation) {
if (obs != null)
obs.remove(observation);
}
/**
* @return Returns the orders
*/
public Set<Order> getOrders() {
if (orders == null) {
return new HashSet<Order>();
}
return orders;
}
/**
* @param orders The orders to set.
*/
public void setOrders(Set<Order> orders) {
this.orders = orders;
}
/**
* Add the given Order to the list of orders for this Encounter
*
* @param order
* @should add order with null values
* @should not fail with null obs passed to add order
* @should set encounter attribute
 * @should add order to non null initial order set
*/
public void addOrder(Order order) {
if (orders == null)
orders = new HashSet<Order>();
if (order != null) {
order.setEncounter(this);
orders.add(order);
}
}
/**
* Remove the given observation from the list of orders for this Encounter
*
* @param order
* @should remove order from encounter
* @should not fail when removing null order
* @should not fail when removing non existent order
*/
public void removeOrder(Order order) {
if (orders != null)
orders.remove(order);
}
/**
* @return Returns the patient.
*/
public Patient getPatient() {
return patient;
}
/**
* @param patient The patient to set.
*/
public void setPatient(Patient patient) {
this.patient = patient;
}
/**
* @return the patientId
*/
public Integer getPatientId() {
return patientId;
}
/**
* @param patientId the patientId to set
*/
public void setPatientId(Integer patientId) {
this.patientId = patientId;
}
/**
* Basic property accessor for encounterProviders. The convenience methods getProvidersByRoles
* and getProvidersByRole are the preferred methods for getting providers. This getter is
* provided as a convenience for treating this like a DTO
*
* @return list of all existing providers on this encounter
* @see #getProvidersByRole(EncounterRole)
* @see #getProvidersByRoles()
*/
public Set<EncounterProvider> getEncounterProviders() {
return encounterProviders;
}
/**
* Basic property setter for encounterProviders. The convenience methods addProvider,
* removeProvider, and setProvider are the preferred methods for adding/removing providers. This
* setter is provided as a convenience for treating this like a DTO
*
* @param encounterProviders the list of EncounterProvider objects to set. Overwrites list as
* normal setter is inclined to do
* @see #addProvider(EncounterRole, Provider)
* @see #removeProvider(EncounterRole, Provider)
* @see #setProvider(EncounterRole, Provider)
*/
public void setEncounterProviders(Set<EncounterProvider> encounterProviders) {
this.encounterProviders = encounterProviders;
}
/**
* @return Returns the provider.
* @since 1.6 (used to return User)
* @deprecated since 1.9, use {@link #getProvidersByRole(EncounterRole)}
* @should return null if there is no providers
* @should return provider for person
* @should return null if there is no provider for person
* @should return same provider for person if called twice
*/
public Person getProvider() {
if (encounterProviders == null || encounterProviders.isEmpty()) {
return null;
} else {
for (EncounterProvider encounterProvider : encounterProviders) {
//Return the first person in the list
if (encounterProvider.getProvider().getPerson() != null) {
return encounterProvider.getProvider().getPerson();
}
}
}
return null;
}
/**
* @param provider The provider to set.
* @deprecated use {@link #setProvider(Person)}
*/
public void setProvider(User provider) {
setProvider(provider.getPerson());
}
/**
* @param provider The provider to set.
* @deprecated since 1.9, use {@link #setProvider(EncounterRole, Provider)}
* @should set existing provider for unknown role
*/
public void setProvider(Person provider) {
EncounterRole unknownRole = Context.getEncounterService().getEncounterRoleByUuid(
EncounterRole.UNKNOWN_ENCOUNTER_ROLE_UUID);
if (unknownRole == null) {
throw new IllegalStateException("No 'Unknown' encounter role with uuid "
+ EncounterRole.UNKNOWN_ENCOUNTER_ROLE_UUID + ".");
}
Collection<Provider> providers = Context.getProviderService().getProvidersByPerson(provider);
if (providers == null || providers.isEmpty()) {
throw new IllegalArgumentException("No provider with personId " + provider.getPersonId());
}
setProvider(unknownRole, providers.iterator().next());
}
/**
* @return Returns the form.
*/
public Form getForm() {
return form;
}
/**
* @param form The form to set.
*/
public void setForm(Form form) {
this.form = form;
}
/**
* @see java.lang.Object#toString()
* @should not fail with empty object
*/
@Override
public String toString() {
String ret = "";
ret += encounterId == null ? "(no ID) " : encounterId.toString() + " ";
ret += this.getEncounterDatetime() == null ? "(no Date) " : this.getEncounterDatetime().toString() + " ";
ret += this.getEncounterType() == null ? "(no Type) " : this.getEncounterType().getName() + " ";
ret += this.getLocation() == null ? "(no Location) " : this.getLocation().getName() + " ";
ret += this.getPatient() == null ? "(no Patient) " : this.getPatient().getPatientId().toString() + " ";
ret += this.getForm() == null ? "(no Form) " : this.getForm().getName() + " ";
ret += this.getObsAtTopLevel(false) == null ? "(no Obs) " : "num Obs: " + this.getObsAtTopLevel(false).size() + " ";
ret += this.getOrders() == null ? "(no Orders) " : "num Orders: " + this.getOrders().size() + " ";
return "Encounter: [" + ret + "]";
}
/**
* @since 1.5
* @see org.openmrs.OpenmrsObject#getId()
*/
public Integer getId() {
return getEncounterId();
}
/**
* @since 1.5
* @see org.openmrs.OpenmrsObject#setId(java.lang.Integer)
*/
public void setId(Integer id) {
setEncounterId(id);
}
/**
* Gets the visit.
*
* @return the visit.
* @since 1.9
*/
public Visit getVisit() {
return visit;
}
/**
* Sets the visit
*
* @param visit the visit to set.
* @since 1.9
*/
public void setVisit(Visit visit) {
this.visit = visit;
}
/**
* Gets all unvoided providers, grouped by role.
*
* @return map of unvoided providers keyed by roles
* @since 1.9
* @should return empty map if no unvoided providers
* @should return all roles and unvoided providers
*/
public Map<EncounterRole, Set<Provider>> getProvidersByRoles() {
return getProvidersByRoles(false);
}
/**
* Gets all providers, grouped by role.
*
* @param includeVoided set to true to include voided providers, else set to false
* @return map of providers keyed by roles
* @since 1.9
* @should return empty map if no providers
* @should return all roles and providers
*/
public Map<EncounterRole, Set<Provider>> getProvidersByRoles(boolean includeVoided) {
Map<EncounterRole, Set<Provider>> providers = new HashMap<EncounterRole, Set<Provider>>();
for (EncounterProvider encounterProvider : encounterProviders) {
if (!includeVoided && encounterProvider.getVoided()) {
continue;
}
Set<Provider> list = providers.get(encounterProvider.getEncounterRole());
if (list == null) {
list = new LinkedHashSet<Provider>();
providers.put(encounterProvider.getEncounterRole(), list);
}
list.add(encounterProvider.getProvider());
}
return providers;
}
/**
* Gets unvoided providers who had the given role in this encounter.
*
* @param role
* @return unvoided providers or empty set if none was found
* @since 1.9
* @should return unvoided providers for role
* @should return empty set for no role
* @should return empty set for null role
*/
public Set<Provider> getProvidersByRole(EncounterRole role) {
return getProvidersByRole(role, false);
}
/**
* Gets providers who had the given role in this encounter.
*
* @param role
* @param includeVoided set to true to include voided providers, else set to false
* @return providers or empty set if none was found
* @since 1.9
* @should return providers for role
* @should return empty set for no role
* @should return empty set for null role
*/
public Set<Provider> getProvidersByRole(EncounterRole role, boolean includeVoided) {
Set<Provider> providers = new LinkedHashSet<Provider>();
for (EncounterProvider encounterProvider : encounterProviders) {
if (encounterProvider.getEncounterRole().equals(role)) {
if (!includeVoided && encounterProvider.getVoided()) {
continue;
}
providers.add(encounterProvider.getProvider());
}
}
return providers;
}
/**
* Adds a new provider for the encounter, with the given role.
*
* @param role
* @param provider
* @since 1.9
* @should add provider for new role
* @should add second provider for role
* @should not add same provider twice for role
*/
public void addProvider(EncounterRole role, Provider provider) {
// first, make sure the provider isn't already there
for (EncounterProvider ep : encounterProviders) {
if (ep.getEncounterRole().equals(role) && ep.getProvider().equals(provider))
return;
}
EncounterProvider encounterProvider = new EncounterProvider();
encounterProvider.setEncounter(this);
encounterProvider.setEncounterRole(role);
encounterProvider.setProvider(provider);
encounterProvider.setDateCreated(new Date());
encounterProvider.setCreator(Context.getAuthenticatedUser());
encounterProviders.add(encounterProvider);
}
/**
* Sets the provider for the given role.
* <p>
* If the encounter already had any providers for the given role, those are removed.
*
* @param role
* @param provider
* @since 1.9
* @should set provider for new role
* @should clear providers and set provider for role
* @should void existing EncounterProvider
*/
public void setProvider(EncounterRole role, Provider provider) {
boolean hasProvider = false;
for (Iterator<EncounterProvider> it = encounterProviders.iterator(); it.hasNext();) {
EncounterProvider encounterProvider = it.next();
if (encounterProvider.getEncounterRole().equals(role)) {
if (!encounterProvider.getProvider().equals(provider)) {
encounterProvider.setVoided(true);
encounterProvider.setDateVoided(new Date());
encounterProvider.setVoidedBy(Context.getAuthenticatedUser());
} else {
hasProvider = true;
}
}
}
if (!hasProvider) {
addProvider(role, provider);
}
}
/**
* Removes the provider for a given role.
*
* @param role the role.
* @param provider the provider.
* @since 1.9
* @should void existing EncounterProvider
*/
public void removeProvider(EncounterRole role, Provider provider) {
for (EncounterProvider encounterProvider : encounterProviders) {
if (encounterProvider.getEncounterRole().equals(role) && encounterProvider.getProvider().equals(provider)) {
encounterProvider.setVoided(true);
encounterProvider.setDateVoided(new Date());
encounterProvider.setVoidedBy(Context.getAuthenticatedUser());
return;
}
}
}
}
|
Updating javadoc on getter/setter for encounterProviders property - TRUNK-3419
git-svn-id: ce3478dfdc990238714fcdf4fc6855b7489218cf@27968 5bac5841-c719-aa4e-b3fe-cce5062f897a
|
api/src/main/java/org/openmrs/Encounter.java
|
Updating javadoc on getter/setter for encounterProviders property - TRUNK-3419
|
|
Java
|
agpl-3.0
|
723c83f9232d6b8749c6930f336b5b3f18279a30
| 0
|
mnip91/proactive-component-monitoring,mnip91/proactive-component-monitoring,PaulKh/scale-proactive,jrochas/scale-proactive,PaulKh/scale-proactive,mnip91/programming-multiactivities,mnip91/programming-multiactivities,jrochas/scale-proactive,PaulKh/scale-proactive,jrochas/scale-proactive,mnip91/programming-multiactivities,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,mnip91/programming-multiactivities,jrochas/scale-proactive,mnip91/proactive-component-monitoring,PaulKh/scale-proactive,mnip91/proactive-component-monitoring,mnip91/programming-multiactivities,mnip91/proactive-component-monitoring,mnip91/programming-multiactivities,jrochas/scale-proactive,PaulKh/scale-proactive,jrochas/scale-proactive,jrochas/scale-proactive,PaulKh/scale-proactive
|
/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2012 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.objectweb.proactive.core.runtime;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.rmi.AlreadyBoundException;
import java.security.AccessControlException;
import java.security.PublicKey;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import org.objectweb.proactive.ActiveObjectCreationException;
import org.objectweb.proactive.Body;
import org.objectweb.proactive.api.PAActiveObject;
import org.objectweb.proactive.api.PALifeCycle;
import org.objectweb.proactive.api.PARemoteObject;
import org.objectweb.proactive.core.Constants;
import org.objectweb.proactive.core.ProActiveException;
import org.objectweb.proactive.core.UniqueID;
import org.objectweb.proactive.core.body.AbstractBody;
import org.objectweb.proactive.core.body.ActiveBody;
import org.objectweb.proactive.core.body.Context;
import org.objectweb.proactive.core.body.LocalBodyStore;
import org.objectweb.proactive.core.body.UniversalBody;
import org.objectweb.proactive.core.body.ft.checkpointing.Checkpoint;
import org.objectweb.proactive.core.body.migration.MigrationException;
import org.objectweb.proactive.core.body.proxy.UniversalBodyProxy;
import org.objectweb.proactive.core.config.CentralPAPropertyRepository;
import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptorInternal;
import org.objectweb.proactive.core.descriptor.data.VirtualNodeInternal;
import org.objectweb.proactive.core.descriptor.services.TechnicalService;
import org.objectweb.proactive.core.descriptor.util.RefactorPAD;
import org.objectweb.proactive.core.event.RuntimeRegistrationEvent;
import org.objectweb.proactive.core.event.RuntimeRegistrationEventProducerImpl;
import org.objectweb.proactive.core.filetransfer.FileTransferEngine;
import org.objectweb.proactive.core.gc.GarbageCollector;
import org.objectweb.proactive.core.httpserver.ClassServerServlet;
import org.objectweb.proactive.core.httpserver.HTTPServer;
import org.objectweb.proactive.core.jmx.mbean.JMXClassLoader;
import org.objectweb.proactive.core.jmx.mbean.ProActiveRuntimeWrapper;
import org.objectweb.proactive.core.jmx.mbean.ProActiveRuntimeWrapperMBean;
import org.objectweb.proactive.core.jmx.naming.FactoryName;
import org.objectweb.proactive.core.jmx.notification.GCMRuntimeRegistrationNotificationData;
import org.objectweb.proactive.core.jmx.notification.NotificationType;
import org.objectweb.proactive.core.jmx.notification.RuntimeNotificationData;
import org.objectweb.proactive.core.jmx.server.ServerConnector;
import org.objectweb.proactive.core.jmx.util.JMXNotificationManager;
import org.objectweb.proactive.core.mop.ConstructorCall;
import org.objectweb.proactive.core.mop.ConstructorCallExecutionFailedException;
import org.objectweb.proactive.core.mop.JavassistByteCodeStubBuilder;
import org.objectweb.proactive.core.mop.Utils;
import org.objectweb.proactive.core.node.Node;
import org.objectweb.proactive.core.node.NodeException;
import org.objectweb.proactive.core.node.NodeFactory;
import org.objectweb.proactive.core.node.NodeImpl;
import org.objectweb.proactive.core.process.UniversalProcess;
import org.objectweb.proactive.core.remoteobject.RemoteObjectExposer;
import org.objectweb.proactive.core.rmi.FileProcess;
import org.objectweb.proactive.core.runtime.broadcast.BroadcastDisabledException;
import org.objectweb.proactive.core.runtime.broadcast.RTBroadcaster;
import org.objectweb.proactive.core.security.PolicyServer;
import org.objectweb.proactive.core.security.ProActiveSecurity;
import org.objectweb.proactive.core.security.ProActiveSecurityManager;
import org.objectweb.proactive.core.security.SecurityConstants.EntityType;
import org.objectweb.proactive.core.security.SecurityContext;
import org.objectweb.proactive.core.security.SecurityEntity;
import org.objectweb.proactive.core.security.TypedCertificate;
import org.objectweb.proactive.core.security.crypto.KeyExchangeException;
import org.objectweb.proactive.core.security.crypto.SessionException;
import org.objectweb.proactive.core.security.domain.SecurityDomain;
import org.objectweb.proactive.core.security.exceptions.InvalidPolicyFile;
import org.objectweb.proactive.core.security.exceptions.RenegotiateSessionException;
import org.objectweb.proactive.core.security.exceptions.SecurityNotAvailableException;
import org.objectweb.proactive.core.security.securityentity.Entities;
import org.objectweb.proactive.core.security.securityentity.Entity;
import org.objectweb.proactive.core.util.ClassDataCache;
import org.objectweb.proactive.core.util.ProActiveInet;
import org.objectweb.proactive.core.util.ProActiveRandom;
import org.objectweb.proactive.core.util.URIBuilder;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
/**
* <p>
* Implementation of ProActiveRuntime
* </p>
*
* @author The ProActive Team
* @version 1.0, 2001/10/23
* @since ProActive 0.91
*
*/
public class ProActiveRuntimeImpl extends RuntimeRegistrationEventProducerImpl implements ProActiveRuntime,
LocalProActiveRuntime {
//
// -- STATIC MEMBERS
// -----------------------------------------------------------
//
// the Unique instance of ProActiveRuntime
private static ProActiveRuntimeImpl proActiveRuntime;
// JMX
private static Logger jmxLogger = ProActiveLogger.getLogger(Loggers.JMX);
private static final Logger clLogger = ProActiveLogger.getLogger(Loggers.CLASSLOADING);
/**
*
* @return the proactive runtime associated to this jvm according to the
* current classloader
*/
private static synchronized ProActiveRuntimeImpl getProActiveRuntimeImpl() {
if (proActiveRuntime == null) {
try {
proActiveRuntime = new ProActiveRuntimeImpl();
proActiveRuntime.createMBean();
System.setProperty(PALifeCycle.PA_STARTED_PROP, "true");
if (CentralPAPropertyRepository.PA_RUNTIME_PING.isTrue()) {
new PARTPinger().start();
}
RTBroadcaster rtBrodcaster;
try {
rtBrodcaster = RTBroadcaster.getInstance();
// notify our presence on the lan
rtBrodcaster.sendCreation();
} catch (Exception e) {
// just ignore it; the feature is disabled
logger.debug("unable to activate RTBroadcast, reason is " + e.getMessage());
ProActiveLogger.logEatedException(logger, e);
}
} catch (Exception e) {
logger.fatal("Error while initializing ProActive Runtime", e);
throw new RuntimeException(e);
}
return proActiveRuntime;
} else {
return proActiveRuntime;
}
}
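// Usage sketch (illustrative only): callers normally obtain the singleton through the public
// accessor getProActiveRuntime() further below, which delegates to the lazy, synchronized
// initialization above, e.g.
//     ProActiveRuntimeImpl runtime = ProActiveRuntimeImpl.getProActiveRuntime();
//     String runtimeUrl = runtime.getURL();  // also used to derive PA_CODEBASE in the constructor below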
// runtime security manager
private static ProActiveSecurityManager runtimeSecurityManager;
// map of local nodes, key is node name
private Map<String, LocalNode> nodeMap;
//
// -- PRIVATE MEMBERS
// -----------------------------------------------------------
//
private VMInformationImpl vmInformation;
// map VirtualNodes and their names
private Map<String, VirtualNodeInternal> virtualNodesMap;
// map descriptor and their url
private Map<String, ProActiveDescriptorInternal> descriptorMap;
// map proActiveRuntime registered on this VM and their names
private Map<String, ProActiveRuntime> proActiveRuntimeMap;
private ProActiveRuntime parentRuntime;
protected RemoteObjectExposer<ProActiveRuntime> roe;
// JMX
/** The Server Connector to connect remotely to the JMX server */
private ServerConnector serverConnector;
private Object mutex = new Object();
/** The MBean representing this ProActive Runtime */
private ProActiveRuntimeWrapperMBean mbean;
private long gcmNodes;
//
// -- CONSTRUCTORS
// -----------------------------------------------------------
//
// singleton
protected ProActiveRuntimeImpl() throws ProActiveException {
try {
this.vmInformation = new VMInformationImpl();
this.proActiveRuntimeMap = new ConcurrentHashMap<String, ProActiveRuntime>();
this.virtualNodesMap = new ConcurrentHashMap<String, VirtualNodeInternal>();
this.descriptorMap = new ConcurrentHashMap<String, ProActiveDescriptorInternal>();
this.nodeMap = new ConcurrentHashMap<String, LocalNode>();
try {
String file = CentralPAPropertyRepository.PA_RUNTIME_SECURITY.getValue();
ProActiveSecurity.loadProvider();
if ((file != null) && new File(file).exists()) {
// loading security from a file
ProActiveRuntimeImpl.runtimeSecurityManager = new ProActiveSecurityManager(
EntityType.RUNTIME, file);
ProActiveLogger.getLogger(Loggers.SECURITY_RUNTIME).info(
"ProActive Security Policy (proactive.runtime.security) using " + file);
runtimeSecurityManager = runtimeSecurityManager.generateSiblingCertificate(
EntityType.RUNTIME, this.getVMInformation().getName());
// Is the runtime included within a Domain ?
String domainURL = CentralPAPropertyRepository.PA_RUNTIME_DOMAIN_URL.getValue();
if (domainURL != null) {
SecurityEntity domain = PAActiveObject.lookupActive(SecurityDomain.class, domainURL);
ProActiveRuntimeImpl.runtimeSecurityManager.setParent(domain);
}
} else {
ProActiveLogger
.getLogger(Loggers.SECURITY_RUNTIME)
.debug(
"ProActive Security Policy (proactive.runtime.security) not set. Runtime Security disabled ");
}
} catch (InvalidPolicyFile e) {
e.printStackTrace();
} catch (ActiveObjectCreationException e) {
e.printStackTrace();
}
// System.out.println(vmInformation.getVMID().toString());
} catch (UnknownHostException e) {
// System.out.println();
logger.fatal(" !!! Cannot do a reverse lookup on that host");
// System.out.println();
e.printStackTrace();
System.exit(1);
} catch (IOException e) {
e.printStackTrace();
}
// Remote Object exporter
this.roe = new RemoteObjectExposer<ProActiveRuntime>("ProActiveRuntime_" +
vmInformation.getHostName() + "_" + vmInformation.getVMID(),
org.objectweb.proactive.core.runtime.ProActiveRuntime.class.getName(), this,
ProActiveRuntimeRemoteObjectAdapter.class);
this.roe.createRemoteObject(vmInformation.getName(), false);
        if (CentralPAPropertyRepository.PA_CLASSLOADING_USEHTTP.isTrue()) {
            // Set the codebase when useHTTP is true and the
            // ProActiveRMIClassLoader is in use
            String codebase = ClassServerServlet.get().getCodeBase();
            CentralPAPropertyRepository.PA_CODEBASE.setValue(codebase);
        } else {
            // Publish the URL of this runtime in the ProActive codebase
            // URL must be prefixed by pa to use our custom protocol handlers
            // URL must be terminated by a / according to the RMI specification
            CentralPAPropertyRepository.PA_CODEBASE.setValue("pa" + this.getURL() + "/");
        }
// logging info
MDC.remove("runtime");
MDC.put("runtime", getURL());
}
//
// -- PUBLIC METHODS
// -----------------------------------------------------------
//
public static ProActiveRuntimeImpl getProActiveRuntime() {
return getProActiveRuntimeImpl();
}
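    // Illustrative usage sketch: application code obtains the singleton runtime
    // through the accessor above and then queries it. Only methods defined in
    // this class are used.
    //
    //   ProActiveRuntimeImpl runtime = ProActiveRuntimeImpl.getProActiveRuntime();
    //   String runtimeUrl = runtime.getURL();
    //   String[] nodeNames = runtime.getLocalNodeNames();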
    /**
     * If no ServerConnector has been created yet, creates and starts a new one.
     * Any ProActive JMX connector client can then connect to it remotely and
     * manage the MBeans. The started connector can later be retrieved with
     * {@link #getJMXServerConnector()}.
     */
public void startJMXServerConnector() {
synchronized (mutex) {
if (serverConnector == null) {
createServerConnector();
}
}
}
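    // Minimal sketch of starting the JMX server connector and retrieving it;
    // the method is guarded by a null check, so calling it more than once is harmless.
    //
    //   ProActiveRuntimeImpl runtime = ProActiveRuntimeImpl.getProActiveRuntime();
    //   runtime.startJMXServerConnector();
    //   ServerConnector connector = runtime.getJMXServerConnector();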
/**
* @inheritDoc
*/
@Override
public ProActiveRuntimeWrapperMBean getMBean() {
return mbean;
}
/**
* @inheritDoc
*/
@Override
public String getMBeanServerName() {
return URIBuilder.getNameFromURI(getProActiveRuntimeImpl().getURL());
}
/**
* @inheritDoc
*/
@Override
public ServerConnector getJMXServerConnector() {
return serverConnector;
}
//
// -- Implements LocalProActiveRuntime
// -----------------------------------------------
//
/**
* @inheritDoc
*/
@Override
public void registerLocalVirtualNode(VirtualNodeInternal vn, String vnName) {
// System.out.println("vn "+vnName+" registered");
this.virtualNodesMap.put(vnName, vn);
}
/**
* @inheritDoc
*/
@Override
public void setParent(ProActiveRuntime parentPARuntime) {
if (this.parentRuntime == null) {
this.parentRuntime = parentPARuntime;
} else {
runtimeLogger.error("Parent runtime already set!");
}
}
public void registerDescriptor(String url, ProActiveDescriptorInternal pad) {
this.descriptorMap.put(url, pad);
}
/**
* @inheritDoc
*/
@Override
public ProActiveDescriptorInternal getDescriptor(String url, boolean isHierarchicalSearch)
throws IOException, ProActiveException {
ProActiveDescriptorInternal pad = this.descriptorMap.get(url);
// hierarchical search or not, look if we know the pad
if (pad != null) {
// if pad found and hierarchy search return pad with no main
if (isHierarchicalSearch) {
return RefactorPAD.buildNoMainPAD(pad);
} else {
// if not hierarchy search, return the normal pad
return pad;
}
} else if (!isHierarchicalSearch) {
return null; // pad == null
} else { // else search pad in parent runtime
if (this.parentRuntime == null) {
throw new IOException(
"Descriptor cannot be found hierarchically since this runtime has no parent");
}
return this.parentRuntime.getDescriptor(url, true);
}
}
public void removeDescriptor(String url) {
this.descriptorMap.remove(url);
}
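    // Sketch of the descriptor registry above, where runtime is
    // ProActiveRuntimeImpl.getProActiveRuntime() and pad is a hypothetical,
    // already parsed ProActiveDescriptorInternal with deployment URL url:
    //
    //   runtime.registerDescriptor(url, pad);
    //   ProActiveDescriptorInternal found = runtime.getDescriptor(url, false);
    //   runtime.removeDescriptor(url);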
/**
* Creates a Server Connector
*/
private void createServerConnector() {
        // Once the ServerConnector is launched, any ProActive JMX connector
        // client can connect to it remotely and manage the MBeans.
serverConnector = new ServerConnector(URIBuilder.getNameFromURI(getProActiveRuntimeImpl().getURL()));
try {
serverConnector.start();
} catch (IOException e) {
jmxLogger.error("Can't start the JMX Connector in the ProActive Runtime", e);
}
}
/**
* Creates the MBean associated to the ProActiveRuntime
*/
protected void createMBean() {
// JMX registration
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
JMXClassLoader jmxClassLoader = new JMXClassLoader(classLoader);
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName objectName = null;
try {
objectName = new ObjectName("org.objectweb.proactive:type=JMXClassLoader");
} catch (MalformedObjectNameException e) {
jmxLogger.error("Can't create the objectName of the JMX ClassLoader MBean", e);
} catch (NullPointerException e) {
jmxLogger.error("Can't create the objectName of the JMX ClassLoader MBean", e);
}
try {
mbs.registerMBean(jmxClassLoader, objectName);
} catch (InstanceAlreadyExistsException e) {
jmxLogger.debug("A MBean with the object name " + objectName + " already exists", e);
} catch (MBeanRegistrationException e) {
jmxLogger.error("Can't register the MBean of the JMX ClassLoader", e);
} catch (NotCompliantMBeanException e) {
jmxLogger.error("The MBean of the JMX ClassLoader is not JMX compliant", e);
}
String runtimeUrl = getProActiveRuntimeImpl().getURL();
objectName = FactoryName.createRuntimeObjectName(runtimeUrl);
if (!mbs.isRegistered(objectName)) {
mbean = new ProActiveRuntimeWrapper(getProActiveRuntimeImpl());
try {
mbs.registerMBean(mbean, objectName);
} catch (InstanceAlreadyExistsException e) {
jmxLogger.error("A MBean with the object name " + objectName + " already exists", e);
} catch (MBeanRegistrationException e) {
jmxLogger.error("Can't register the MBean of the ProActive Runtime", e);
} catch (NotCompliantMBeanException e) {
jmxLogger.error("The MBean of the ProActive Runtime is not JMX compliant", e);
}
}
}
//
// -- Implements ProActiveRuntime
// -----------------------------------------------
//
/**
* @inheritDoc
*/
@Override
public Node createLocalNode(String nodeName, boolean replacePreviousBinding,
ProActiveSecurityManager nodeSecurityManager, String vnName) throws NodeException,
AlreadyBoundException {
if (!replacePreviousBinding && (this.nodeMap.get(nodeName) != null)) {
throw new AlreadyBoundException("Node " + nodeName +
" already created on this ProActiveRuntime. To overwrite this node, use true for replacePreviousBinding");
}
if (nodeSecurityManager != null) {
// setting the current runtime as parent entity of the node
nodeSecurityManager.setParent(this);
}
try {
LocalNode localNode = new LocalNode(nodeName, nodeSecurityManager, vnName, replacePreviousBinding);
if (replacePreviousBinding && (this.nodeMap.get(nodeName) != null)) {
localNode.setActiveObjects(this.nodeMap.get(nodeName).getActiveObjectsId());
this.nodeMap.remove(nodeName);
}
this.nodeMap.put(nodeName, localNode);
Node node = null;
try {
node = new NodeImpl((ProActiveRuntime) PARemoteObject.lookup(URI.create(localNode.getURL())),
localNode.getURL());
} catch (ProActiveException e) {
throw new NodeException("Failed to created NodeImpl", e);
}
return node;
} catch (ProActiveException e) {
throw new NodeException("Failed to create the LocalNode for " + nodeName, e);
}
}
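    // Hedged example of creating a plain local node, where runtime is
    // ProActiveRuntimeImpl.getProActiveRuntime(); the security manager and the
    // virtual node name are passed as null, which the code above tolerates, and
    // the node name is a placeholder.
    //
    //   Node node = runtime.createLocalNode("myNode", false, null, null);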
/**
* @inheritDoc
*/
@Override
public Node createGCMNode(ProActiveSecurityManager nodeSecurityManager, String vnName,
List<TechnicalService> tsList) throws NodeException, AlreadyBoundException {
if (gcmNodes >= vmInformation.capacity) {
logger.warn("Runtime capacity exceeded. A bug inside GCM Deployment occured");
}
String nodeName = this.vmInformation.getName() + "_" + Constants.GCM_NODE_NAME + gcmNodes;
Node node = null;
try {
node = createLocalNode(nodeName, false, nodeSecurityManager, vnName);
for (TechnicalService ts : tsList) {
ts.apply(node);
}
        } catch (NodeException e) {
            // Nothing can be done here. This node will not be created
            logger.warn("Failed to create a capacity node", e);
        } catch (AlreadyBoundException e) {
            // CapacityNode- is a reserved namespace.
            // Should not happen; log it and replace the old node
            logger.warn(nodeName + " is already registered... replacing it!");
try {
createLocalNode(nodeName, true, null, vnName);
} catch (NodeException e1) {
logger.warn("Failed to create a capacity node", e1);
} catch (AlreadyBoundException e1) {
// Cannot be thrown since replacePreviousBinding = true
logger.warn("Impossible exception ! Check Me !", e1);
}
}
gcmNodes++;
return node;
}
/**
* @inheritDoc
*/
@Override
public void killAllNodes() {
for (Map.Entry<String, LocalNode> e : this.nodeMap.entrySet()) {
String nodeName = e.getKey();
killNode(nodeName);
}
}
/**
* @inheritDoc
*/
@Override
public void killNode(String nodeName) {
LocalNode localNode = this.nodeMap.get(nodeName);
if (localNode != null) {
localNode.terminate();
}
this.nodeMap.remove(nodeName);
}
/**
* @inheritDoc
*/
@Override
public void createVM(UniversalProcess remoteProcess) throws java.io.IOException {
remoteProcess.startProcess();
}
/**
* @inheritDoc
*/
@Override
public String[] getLocalNodeNames() {
int i = 0;
String[] nodeNames;
synchronized (this.nodeMap) {
nodeNames = new String[this.nodeMap.size()];
for (Map.Entry<String, LocalNode> e : this.nodeMap.entrySet()) {
nodeNames[i] = e.getKey();
i++;
}
}
return nodeNames;
}
    /**
     * Returns all the local nodes of this runtime.
     *
     * The returned collection is a live view backed by the runtime's node map,
     * so it reflects nodes created or killed after this call.
     *
     * @return all the local nodes
     */
public Collection<LocalNode> getLocalNodes() {
return this.nodeMap.values();
}
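    // Illustrative iteration over the nodes currently known to this runtime,
    // using only the accessors defined in this class:
    //
    //   for (String name : runtime.getLocalNodeNames()) {
    //       List<UniversalBody> bodies = runtime.getActiveObjects(name);
    //       // inspect or administer each node's active objects here
    //   }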
/**
* @inheritDoc
*/
@Override
public VMInformation getVMInformation() {
return this.vmInformation;
}
/**
* @inheritDoc
*/
@Override
public void register(ProActiveRuntime proActiveRuntimeDist, String proActiveRuntimeName,
String creatorID, String creationProtocol, String vmName) {
// System.out.println("register in Impl");
// System.out.println("thread"+Thread.currentThread().getName());
// System.out.println(vmInformation.getVMID().toString());
this.proActiveRuntimeMap.put(proActiveRuntimeName, proActiveRuntimeDist);
// ProActiveEvent
notifyListeners(this, RuntimeRegistrationEvent.RUNTIME_REGISTERED, proActiveRuntimeDist, creatorID,
creationProtocol, vmName);
// END ProActiveEvent
// JMX Notification
if (getMBean() != null) {
RuntimeNotificationData notificationData = new RuntimeNotificationData(creatorID,
proActiveRuntimeDist.getURL(), creationProtocol, vmName);
getMBean().sendNotification(NotificationType.runtimeRegistered, notificationData);
}
// END JMX Notification
}
/**
* @inheritDoc
*/
@Override
public void unregister(ProActiveRuntime proActiveRuntimeDist, String proActiveRuntimeUrl,
String creatorID, String creationProtocol, String vmName) {
this.proActiveRuntimeMap.remove(proActiveRuntimeUrl);
// ProActiveEvent
notifyListeners(this, RuntimeRegistrationEvent.RUNTIME_UNREGISTERED, proActiveRuntimeDist, creatorID,
creationProtocol, vmName);
// END ProActiveEvent
// JMX Notification
if (getMBean() != null) {
RuntimeNotificationData notificationData = new RuntimeNotificationData(creatorID,
proActiveRuntimeDist.getURL(), creationProtocol, vmName);
getMBean().sendNotification(NotificationType.runtimeUnregistered, notificationData);
}
// END JMX Notification
}
/**
* @inheritDoc
*/
@Override
public ProActiveRuntime[] getProActiveRuntimes() {
if (this.proActiveRuntimeMap != null) {
return this.proActiveRuntimeMap.values().toArray(new ProActiveRuntime[] {});
} else {
return null;
}
}
/**
* @inheritDoc
*/
@Override
public ProActiveRuntime getProActiveRuntime(String proActiveRuntimeName) {
return this.proActiveRuntimeMap.get(proActiveRuntimeName);
}
/**
* @inheritDoc
*/
@Override
public synchronized void killRT(boolean softly) {
cleanJvmFromPA();
// END JMX unregistration
System.exit(0);
}
public synchronized void cleanJvmFromPA() {
// JMX Notification
if (getMBean() != null) {
getMBean().sendNotification(NotificationType.runtimeDestroyed);
}
// END JMX Notification
// terminates the nodes and their active objects
killAllNodes();
logger.info("terminating Runtime " + vmInformation.getName());
// JMX unregistration
if (getMBean() != null) {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName objectName = getMBean().getObjectName();
if (mbs.isRegistered(objectName)) {
try {
mbs.unregisterMBean(objectName);
} catch (InstanceNotFoundException e) {
jmxLogger.error("The MBean with the objectName " + objectName + " was not found", e);
} catch (MBeanRegistrationException e) {
jmxLogger.error("The MBean with the objectName " + objectName +
" can't be unregistered from the MBean server", e);
}
}
mbean = null;
}
// terminate the broadcast thread if exist
RTBroadcaster broadcaster;
try {
broadcaster = RTBroadcaster.getInstance();
broadcaster.kill();
} catch (BroadcastDisabledException e1) {
// just display the message
logger.debug(e1.getMessage());
}
Iterator<UniversalBody> bodies = LocalBodyStore.getInstance().getLocalBodies().bodiesIterator();
UniversalBody body;
while (bodies.hasNext()) {
try {
body = bodies.next();
((Body) body).terminate();
} catch (Throwable e) {
e.printStackTrace();
}
}
Iterator<UniversalBody> halfBodies = LocalBodyStore.getInstance().getLocalHalfBodies()
.bodiesIterator();
UniversalBody halfBody;
while (halfBodies.hasNext()) {
try {
halfBody = halfBodies.next();
((Body) halfBody).terminate();
} catch (Throwable e) {
e.printStackTrace();
}
}
// unexport the runtime
try {
this.roe.unexportAll();
} catch (ProActiveException e) {
logger.warn("unable to unexport the runtime", e);
}
try {
HTTPServer.get().stop();
HTTPServer.get().destroy();
} catch (Exception e) {
            // The HTTP server may already be stopped; log the failure and continue the shutdown
            logger.warn("Failed to stop the HTTP server", e);
}
this.roe = null;
proActiveRuntime = null;
}
/**
* @inheritDoc
*/
@Override
public String getURL() {
return this.roe.getURL();
}
/**
* @inheritDoc
*/
@Override
public List<UniversalBody> getActiveObjects(String nodeName) {
// the array to return
List<UniversalBody> localBodies = new ArrayList<UniversalBody>();
LocalBodyStore localBodystore = LocalBodyStore.getInstance();
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
if (bodyList == null) {
// Probably the node is killed
return localBodies;
}
synchronized (bodyList) {
for (int i = 0; i < bodyList.size(); i++) {
UniqueID bodyID = bodyList.get(i);
// check if the body is still on this vm
Body body = localBodystore.getLocalBody(bodyID);
if (body == null) {
// runtimeLogger.warn("body null");
// the body with the given ID is not any more on this
// ProActiveRuntime
// unregister it from this ProActiveRuntime
unregisterBody(nodeName, bodyID);
} else {
// the body is on this runtime then return adapter and class
// name of the reified
// object to enable the construction of stub-proxy couple.
localBodies.add(0, body.getRemoteAdapter());
}
}
return localBodies;
}
}
/**
* @inheritDoc
*/
@Override
public VirtualNodeInternal getVirtualNode(String virtualNodeName) {
// System.out.println("i am in get vn ");
return this.virtualNodesMap.get(virtualNodeName);
}
/**
* @inheritDoc
*/
@Override
public void registerVirtualNode(String virtualNodeName, boolean replacePreviousBinding)
throws ProActiveException {
this.roe.createRemoteObject(virtualNodeName, false);
}
/**
* @inheritDoc
*/
@Override
public void unregisterVirtualNode(String virtualNodeName) {
VirtualNodeInternal vn = virtualNodesMap.get(virtualNodeName);
if (vn != null) {
JMXNotificationManager.getInstance().unsubscribe(getMBean().getObjectName(), vn);
}
virtualNodesMap.remove(virtualNodeName);
}
/**
* @inheritDoc
*/
@Override
public void unregisterAllVirtualNodes() {
this.virtualNodesMap.clear();
}
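    // Illustrative virtual-node bookkeeping, assuming vn is a VirtualNodeInternal
    // produced elsewhere by the deployment machinery:
    //
    //   runtime.registerLocalVirtualNode(vn, "myVirtualNode");
    //   VirtualNodeInternal found = runtime.getVirtualNode("myVirtualNode");
    //   runtime.unregisterVirtualNode("myVirtualNode");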
/**
* @inheritDoc
*/
@Override
public List<UniversalBody> getActiveObjects(String nodeName, String className) {
// the array to return
ArrayList<UniversalBody> localBodies = new ArrayList<UniversalBody>();
LocalBodyStore localBodystore = LocalBodyStore.getInstance();
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
if (bodyList == null) {
// Probably the node is killed
return localBodies;
}
synchronized (bodyList) {
for (int i = 0; i < bodyList.size(); i++) {
UniqueID bodyID = bodyList.get(i);
// check if the body is still on this vm
Body body = localBodystore.getLocalBody(bodyID);
if (body == null) {
// runtimeLogger.warn("body null");
// the body with the given ID is not any more on this
// ProActiveRuntime
// unregister it from this ProActiveRuntime
unregisterBody(nodeName, bodyID);
} else {
String objectClass = body.getReifiedObject().getClass().getName();
// if the reified object is of the specified type
// return the body adapter
if (objectClass.equals(className)) {
localBodies.add(body.getRemoteAdapter());
}
}
}
return localBodies;
}
}
/**
* @inheritDoc
*/
@Override
public UniversalBody createBody(String nodeName, ConstructorCall bodyConstructorCall, boolean isLocal)
throws ConstructorCallExecutionFailedException, java.lang.reflect.InvocationTargetException,
ActiveObjectCreationException {
if (NodeFactory.isHalfBodiesNode(nodeName)) {
throw new ActiveObjectCreationException(
"Cannot create an active object on the reserved halfbodies node.");
}
Body localBody = (Body) bodyConstructorCall.execute();
// SECURITY
ProActiveSecurityManager objectSecurityManager = ((AbstractBody) localBody)
.getProActiveSecurityManager();
if (objectSecurityManager != null) {
objectSecurityManager.setParent(this.nodeMap.get(nodeName));
}
ProActiveLogger.getLogger(Loggers.RUNTIME).debug("nodeName " + nodeName);
registerBody(nodeName, localBody);
if (GarbageCollector.dgcIsEnabled()) {
((AbstractBody) localBody).updateReferences(UniversalBodyProxy.getIncomingReferences());
}
if (isLocal) {
// if the body and proxy are on the same vm, returns the local view
// System.out.println("body and proxy on the same vm");
// System.out.println(localBody.getReifiedObject().getClass().getName());
// register the body in the nodemap
return localBody;
} else {
// otherwise return the adapter
// System.out.println ("RemoteProActiveImpl.createBody
// "+vmInformation.getInetAddress().getHostName() +" -> new
// "+bodyConstructorCall.getTargetClassName()+" on node "+nodeName);
// System.out.println ("RemoteProActiveRuntimeImpl.localBody created
// localBody="+localBody+" on node "+nodeName);
return localBody.getRemoteAdapter();
}
}
/**
* @inheritDoc
*/
@Override
public UniversalBody receiveBody(String nodeName, Body body) throws MigrationException {
ProActiveSecurityManager psm = ((AbstractBody) body).getProActiveSecurityManager();
if (psm != null) {
psm.setParent(this.nodeMap.get(nodeName));
}
if (NodeFactory.isHalfBodiesNode(nodeName)) {
throw new MigrationException("Cannot migrate an active object on the reserved halfbodies node.");
}
registerBody(nodeName, body);
// register futures that have been deserialized in the body
((AbstractBody) body).registerIncomingFutures();
return body.getRemoteAdapter();
}
/**
* @inheritDoc
*/
@Override
public UniversalBody receiveCheckpoint(String nodeURL, Checkpoint ckpt, int inc)
throws ProActiveException {
runtimeLogger.debug("Receive a checkpoint for recovery");
if (NodeFactory.isHalfBodiesNode(nodeURL)) {
throw new ProActiveException("Cannot recover an active object on the reserved halfbodies node.");
}
// the recovered body
Body ret = ckpt.recover();
// update node url
ret.updateNodeURL(nodeURL);
String nodeName = URIBuilder.getNameFromURI(nodeURL);
        // push the initial context for the current thread: it must be registered
        // as a thread of the corresponding active object, since it may send
        // logged requests or logged replies
LocalBodyStore.getInstance().pushContext(new Context(ret, null));
try {
((AbstractBody) ret).getFTManager().beforeRestartAfterRecovery(ckpt.getCheckpointInfo(), inc);
} finally {
// remove contexts for the current thread
LocalBodyStore.getInstance().clearAllContexts();
}
// register the body
this.registerBody(nodeName, ret);
// register futures that have been deserialized in the body
((AbstractBody) ret).registerIncomingFutures();
        // restart activity
if (runtimeLogger.isDebugEnabled()) {
runtimeLogger.debug(ret.getID() + " is restarting activity...");
}
((ActiveBody) ret).startBody();
// no more need to return the recovered body
return null;
}
/**
* Registers the specified body in the node with the nodeName key. In fact
* it is the <code>UniqueID</code> of the body that is attached to the node.
*
     * @param nodeName
     *            The name of the node to which the body is attached in the
     *            <code>nodeMap</code>
* @param body
* The body to register
*/
private void registerBody(String nodeName, Body body) {
UniqueID bodyID = body.getID();
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
synchronized (bodyList) {
if (!bodyList.contains(bodyID)) {
// System.out.println("in registerbody id = "+
// bodyID.toString());
bodyList.add(bodyID);
}
}
}
/**
* Unregisters the specified <code>UniqueID</code> from the node
* corresponding to the nodeName key
*
     * @param nodeName
     *            The name of the node from which to remove the <code>UniqueID</code>
* @param bodyID
* The <code>UniqueID</code> to remove
*/
private void unregisterBody(String nodeName, UniqueID bodyID) {
// System.out.println("in remove id= "+ bodyID.toString());
// System.out.println("array size
// "+((ArrayList)hostsMap.get(nodeName)).size());
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
synchronized (bodyList) {
bodyList.remove(bodyID);
// System.out.println("array size
// "+((ArrayList)hostsMap.get(nodeName)).size());
}
}
// SECURITY
/**
* set the runtime security manager
*/
public static void setProActiveSecurityManager(ProActiveSecurityManager server) {
if (runtimeSecurityManager != null) {
return;
}
runtimeSecurityManager = server;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#getEntities(java
* .lang.String)
*/
public Entities getEntities(String nodeName) {
ProActiveSecurityManager nodeSecurityManager = this.nodeMap.get(nodeName).getSecurityManager();
Entities entities = this.getEntities();
if (nodeSecurityManager != null) {
entities.add(new Entity(nodeSecurityManager.getMyCertificateChain()));
}
return entities;
}
/**
     * The runtime looks for a matching security entity within its nodes and
     * active objects.
*
* @param securityEntity
* the security entity looked for.
* @return matching entities
*/
public Entities getEntities(SecurityEntity securityEntity) {
        // Not implemented: fail fast with an explicit message
        if (true) {
            throw new RuntimeException("getEntities(SecurityEntity) is not implemented");
        }
        return null;
// hostsMap.
// try {
// System.out.println(" testing for securityentityID " +
// securityEntity);
// for (Enumeration e = hostsMap.keys(); e.hasMoreElements();) {
// String node = (String) e.nextElement();
//
// System.out.println("testing for node " + node);
// ArrayList listAO = (ArrayList) hostsMap.get(node);
//
// for (int i = 0; i < listAO.size(); i++) {
// UniqueID localBodyID = (UniqueID) listAO.get(i);
// System.out.println(" testing against localBbodyID " +
// localBodyID);
//
// if (securityEntity.getCertificate().equals(localBodyID)) {
// ArrayList a = new ArrayList();
//
// ProActiveSecurityManager nodeSecurityManager =
// (ProActiveSecurityManager) nodeSecurityManagerMap.get(node);
// PolicyServer nodePolicyServer =
// nodeSecurityManager.getPolicyServer();
//
// if (nodePolicyServer != null) {
// EntityVirtualNode entityVirtualNode = new
// EntityVirtualNode(nodeSecurityManager.getVNName(),
// nodePolicyServer.getApplicationCertificate(),
// nodeSecurityManager.getCertificate());
// a.add(entityVirtualNode);
// return a;
// }
// }
//
// }
// }
// } catch (SecurityNotAvailableException e1) {
// e1.printStackTrace();
// } catch (IOException e1) {
// e1.printStackTrace();
// }
// return new ArrayList();
}
/*
* (non-Javadoc)
*
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#getEntities()
*/
public Entities getEntities() {
Entities entities = new Entities();
if (runtimeSecurityManager != null) {
entities.add(new Entity(runtimeSecurityManager.getMyCertificateChain()));
return entities;
}
return null;
}
/**
* @param local
* @param distant
*/
public SecurityContext getPolicy(Entities local, Entities distant) throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
// PolicyServer policyServer = runtimeSecurityManager.getPolicyServer();
return runtimeSecurityManager.getPolicy(local, distant);
}
public synchronized byte[] getClassData(String className) {
byte[] classData = null;
// Check class data cache (already generated stub)
classData = ClassDataCache.instance().getClassData(className);
if (classData != null) {
return classData;
} else {
if (clLogger.isTraceEnabled()) {
clLogger.trace(className + " is not in the class data cache");
}
}
// Look in classpath
try {
classData = FileProcess.getBytesFromResource(className);
if (classData != null) {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Found " + className + " in the classpath");
}
return classData;
} else {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Failed to find " + className + " in classpath");
}
}
} catch (IOException e2) {
Logger l = ProActiveLogger.getLogger(Loggers.CLASSLOADING);
ProActiveLogger.logEatedException(l, e2);
}
// Generate stub
classData = generateStub(className);
if (classData != null) {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Generated " + className + " stub");
}
return classData;
} else {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Failed to generate stub for " + className);
}
}
return null;
}
public void launchMain(String className, String[] parameters) throws ClassNotFoundException,
NoSuchMethodException, ProActiveException {
System.out.println("ProActiveRuntimeImpl.launchMain() -" + className + "-");
Class<?> mainClass = Class.forName(className);
Method mainMethod = mainClass.getMethod("main", new Class[] { String[].class });
new LauncherThread(mainMethod, parameters).start();
}
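    // Minimal sketch: launching a hypothetical main class in a separate thread
    // through the runtime (the class name and arguments are placeholders):
    //
    //   runtime.launchMain("org.example.HelloWorld", new String[] { "arg0" });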
public void newRemote(String className) throws ClassNotFoundException, ProActiveException {
Class<?> remoteClass = Class.forName(className);
new LauncherThread(remoteClass).start();
}
// tries to generate a stub without using MOP methods
private byte[] generateStub(String className) {
byte[] classData = null;
if (Utils.isStubClassName(className)) {
// do not use directly MOP methods (avoid classloader cycles)
String classname = Utils.convertStubClassNameToClassName(className);
classData = JavassistByteCodeStubBuilder.create(classname, null);
if (classData != null) {
ClassDataCache.instance().addClassData(className, classData);
return classData;
}
}
// try to get the class as a generated component interface reference
classData = org.objectweb.proactive.core.component.gen.Utils.getClassData(className);
if (classData != null) {
ClassDataCache.instance().addClassData(className, classData);
return classData;
}
return null;
}
/**
* @inheritDoc
*/
@Override
public void terminateSession(long sessionID) throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
runtimeSecurityManager.terminateSession(sessionID);
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#getCertificate()
*/
public TypedCertificate getCertificate() throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.getCertificate();
}
/*
* (non-Javadoc)
*
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#
* getProActiveSecurityManager()
*/
public ProActiveSecurityManager getProActiveSecurityManager() {
return runtimeSecurityManager;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#startNewSession
* (org.objectweb.proactive.ext.security.Communication)
*/
public long startNewSession(long distantSessionID, SecurityContext policy,
TypedCertificate distantCertificate) throws SecurityNotAvailableException, SessionException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.startNewSession(distantSessionID, policy, distantCertificate);
}
/*
* (non-Javadoc)
*
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#getPublicKey()
*/
public PublicKey getPublicKey() throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.getPublicKey();
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#randomValue(long,
* byte[])
*/
public byte[] randomValue(long sessionID, byte[] clientRandomValue) throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
try {
return runtimeSecurityManager.randomValue(sessionID, clientRandomValue);
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#publicKeyExchange
* (long, org.objectweb.proactive.core.body.UniversalBody, byte[], byte[],
* byte[])
*/
public byte[] publicKeyExchange(long sessionID, byte[] signature) throws SecurityNotAvailableException,
RenegotiateSessionException {
if (runtimeSecurityManager != null) {
try {
return runtimeSecurityManager.publicKeyExchange(sessionID, signature);
} catch (KeyExchangeException e) {
e.printStackTrace();
}
} else {
throw new SecurityNotAvailableException();
}
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#secretKeyExchange
* (long, byte[], byte[], byte[], byte[], byte[])
*/
public byte[][] secretKeyExchange(long sessionID, byte[] encodedAESKey, byte[] encodedIVParameters,
byte[] encodedClientMacKey, byte[] encodedLockData, byte[] parametersSignature)
throws SecurityNotAvailableException, RenegotiateSessionException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.secretKeyExchange(sessionID, encodedAESKey, encodedIVParameters,
encodedClientMacKey, encodedLockData, parametersSignature);
}
// /*
// * (non-Javadoc)
// *
// * @see
// org.objectweb.proactive.core.runtime.ProActiveRuntime#getCertificateEncoded()
// */
// public byte[] getCertificateEncoded() throws
// SecurityNotAvailableException {
// if (runtimeSecurityManager == null) {
// throw new SecurityNotAvailableException();
// }
// return runtimeSecurityManager.getCertificateEncoded();
// }
public String getVNName(String nodename) throws ProActiveException {
return this.nodeMap.get(nodename).getVirtualNodeName();
}
//
// -- INNER CLASSES -----------------------------------------------
//
protected static class VMInformationImpl implements VMInformation, java.io.Serializable {
private final java.net.InetAddress hostInetAddress;
// the Unique ID of the JVM
private final java.rmi.dgc.VMID uniqueVMID;
private String name;
private long capacity;
private final String hostName;
private long deploymentId;
private long topologyId;
private String vmName;
public VMInformationImpl() throws java.net.UnknownHostException {
this.uniqueVMID = UniqueID.getCurrentVMID();
this.hostInetAddress = ProActiveInet.getInstance().getInetAddress();
this.hostName = URIBuilder.getHostNameorIP(this.hostInetAddress);
String random = Integer.toString(ProActiveRandom.nextPosInt());
if (CentralPAPropertyRepository.PA_RUNTIME_NAME.isSet()) {
this.name = CentralPAPropertyRepository.PA_RUNTIME_NAME.getValue();
} else {
this.name = "PA_JVM" + random; // + "_" + this.hostName;
}
this.capacity = -1;
this.deploymentId = -1;
this.topologyId = -1;
this.vmName = null;
}
//
// -- PUBLIC METHODS -----------------------------------------------
//
//
// -- implements VMInformation
// -----------------------------------------------
//
public java.rmi.dgc.VMID getVMID() {
return this.uniqueVMID;
}
public String getName() {
return this.name;
}
public java.net.InetAddress getInetAddress() {
return this.hostInetAddress;
}
/**
* @see org.objectweb.proactive.core.runtime.VMInformation#getHostName()
*/
public String getHostName() {
return this.hostName;
}
/**
* @see org.objectweb.proactive.core.runtime.VMInformation#getDescriptorVMName()
*/
public String getDescriptorVMName() {
return this.vmName;
}
public long getCapacity() {
return capacity;
}
private void setCapacity(long capacity) {
this.capacity = capacity;
}
public long getTopologyId() {
return topologyId;
}
private void setTopologyId(long topologyId) {
this.topologyId = topologyId;
}
public long getDeploymentId() {
return deploymentId;
}
private void setDeploymentId(long deploymentId) {
this.deploymentId = deploymentId;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result) + ((uniqueVMID == null) ? 0 : uniqueVMID.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final VMInformationImpl other = (VMInformationImpl) obj;
if (uniqueVMID == null) {
if (other.uniqueVMID != null) {
return false;
}
} else if (!uniqueVMID.equals(other.uniqueVMID)) {
return false;
}
return true;
}
}
//
// ----------------- INNER CLASSES --------------------------------
//
/**
* inner class for method invocation
*/
private class LauncherThread extends Thread {
private final boolean launchMain;
private Method mainMethod;
private Class<?> remoteClass;
private String[] parameters;
public LauncherThread(Class<?> remoteClass) {
this.remoteClass = remoteClass;
this.launchMain = false;
}
public LauncherThread(Method mainMethod, String[] parameters) {
this.mainMethod = mainMethod;
this.parameters = parameters;
this.launchMain = true;
}
@Override
public void run() {
if (this.launchMain) {
try {
this.mainMethod.invoke(null, new Object[] { this.parameters });
} catch (InvocationTargetException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
} else {
try {
this.remoteClass.newInstance();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InstantiationException e) {
e.printStackTrace();
}
}
}
}
/**
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#setLocalNodeProperty(java.lang.String,
* java.lang.String, java.lang.String)
*/
public Object setLocalNodeProperty(String nodeName, String key, String value) {
return this.nodeMap.get(nodeName).setProperty(key, value);
}
/**
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#getLocalNodeProperty(java.lang.String,
* java.lang.String)
*/
public String getLocalNodeProperty(String nodeName, String key) {
return this.nodeMap.get(nodeName).getProperty(key);
}
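    // Sketch of attaching a property to a local node and reading it back;
    // "myNode" stands for a node previously created on this runtime and the
    // property key is a placeholder:
    //
    //   runtime.setLocalNodeProperty("myNode", "example.key", "value");
    //   String value = runtime.getLocalNodeProperty("myNode", "example.key");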
public RemoteObjectExposer<ProActiveRuntime> getRemoteObjectExposer() {
return this.roe;
}
public String[] getURLs() {
return this.roe.getURLs();
}
public ProActiveSecurityManager getProActiveSecurityManager(Entity user)
throws SecurityNotAvailableException, AccessControlException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.getProActiveSecurityManager(user);
}
public void setProActiveSecurityManager(Entity user, PolicyServer policyServer)
throws SecurityNotAvailableException, AccessControlException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
runtimeSecurityManager.setProActiveSecurityManager(user, policyServer);
}
public void setCapacity(long capacity) {
        if (vmInformation.getCapacity() > 0) {
            throw new IllegalStateException("Capacity already set to " + vmInformation.getCapacity());
        }
        if (capacity < 1) {
            throw new IllegalArgumentException(capacity +
                " is not a valid parameter for setCapacity. Must be a strictly positive long");
        }
logger.debug("Capacity set to " + capacity + ". Creating the nodes...");
vmInformation.setCapacity(capacity);
}
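    // Hedged example: the capacity can be set only once and must be strictly
    // positive, otherwise the checks above throw.
    //
    //   runtime.setCapacity(4);     // accepted the first time
    //   // runtime.setCapacity(8);  // would throw IllegalStateException
    //   // runtime.setCapacity(0);  // would throw IllegalArgumentException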
public void register(GCMRuntimeRegistrationNotificationData notification) {
// createRegistrationForwarder();
getMBean().sendNotification(NotificationType.GCMRuntimeRegistered, notification);
}
public FileTransferEngine getFileTransferEngine() {
return FileTransferEngine.getFileTransferEngine();
}
public void addDeployment(long deploymentId) {
}
public void setDeploymentId(long deploymentId) {
vmInformation.setDeploymentId(deploymentId);
}
    public void setTopologyId(long topologyId) {
        vmInformation.setTopologyId(topologyId);
}
public void setVMName(String vmName) {
vmInformation.vmName = vmName;
}
/**
* Returns the path to the proactive home
*
     * This method is quite expensive if
     * {@link CentralPAPropertyRepository#PA_HOME} is not set. If called often,
     * the value returned by this method should be cached by setting it as the
     * value of PA_HOME. This method has no side effect.
     *
* @since ProActive 5.0.0
*
* @return The value of {@link CentralPAPropertyRepository#PA_HOME} if it is
* set. Otherwise the path is computed according to the class or jar
* location.
*
* @throws ProActiveException
* If the path of the ProActive home cannot be computed or if
* the home is remote (only file and jar protocols are
* supported)
*/
public String getProActiveHome() throws ProActiveException {
if (CentralPAPropertyRepository.PA_HOME.isSet()) {
return CentralPAPropertyRepository.PA_HOME.getValue();
} else {
// Guess the location by using the classloader
final URL url = this.getClass().getResource(this.getClass().getSimpleName() + ".class");
final String path = url.getPath();
if ("jar".equals(url.getProtocol())) {
int begin = path.indexOf("file:");
int end = path.indexOf(".jar!");
if (begin != 0 || end < 0) {
throw new ProActiveException("Unable to find ProActive home. Bad jar url: " + url);
}
end = path.indexOf("dist/lib/ProActive.jar!");
if (end < 0) {
throw new ProActiveException("Unable to find ProActive home. Unexpected jar name: " + url);
}
try {
File padir = new File(new URI(path.substring(begin, end)));
return padir.getCanonicalPath();
} catch (URISyntaxException e) {
throw new ProActiveException(e);
} catch (IOException e) {
throw new ProActiveException(e);
}
} else if ("file".equals(url.getProtocol())) {
int index = path.indexOf("classes/Core/" + this.getClass().getName().replace('.', '/') +
".class");
if (index > 0) {
try {
return new File(new URI("file:" + path.substring(0, index))).getCanonicalPath();
} catch (URISyntaxException e) {
throw new ProActiveException(e);
} catch (IOException e) {
throw new ProActiveException(e);
}
} else {
throw new ProActiveException(
"Unable to find ProActive home. Running from class files but non standard repository layout");
}
} else {
throw new ProActiveException("Unable to find ProActive home. Unspported protocol: " + url);
}
}
}
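    // Sketch of the caching pattern recommended in the javadoc of getProActiveHome(),
    // assuming PA_HOME exposes the same setValue(String) used above for PA_CODEBASE:
    //
    //   if (!CentralPAPropertyRepository.PA_HOME.isSet()) {
    //       CentralPAPropertyRepository.PA_HOME.setValue(runtime.getProActiveHome());
    //   }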
}
|
src/Core/org/objectweb/proactive/core/runtime/ProActiveRuntimeImpl.java
|
/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2012 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.objectweb.proactive.core.runtime;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.rmi.AlreadyBoundException;
import java.security.AccessControlException;
import java.security.PublicKey;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.management.InstanceAlreadyExistsException;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;
import org.apache.log4j.Logger;
import org.apache.log4j.MDC;
import org.objectweb.proactive.ActiveObjectCreationException;
import org.objectweb.proactive.Body;
import org.objectweb.proactive.api.PAActiveObject;
import org.objectweb.proactive.api.PALifeCycle;
import org.objectweb.proactive.api.PARemoteObject;
import org.objectweb.proactive.core.Constants;
import org.objectweb.proactive.core.ProActiveException;
import org.objectweb.proactive.core.UniqueID;
import org.objectweb.proactive.core.body.AbstractBody;
import org.objectweb.proactive.core.body.ActiveBody;
import org.objectweb.proactive.core.body.Context;
import org.objectweb.proactive.core.body.LocalBodyStore;
import org.objectweb.proactive.core.body.UniversalBody;
import org.objectweb.proactive.core.body.ft.checkpointing.Checkpoint;
import org.objectweb.proactive.core.body.migration.MigrationException;
import org.objectweb.proactive.core.body.proxy.UniversalBodyProxy;
import org.objectweb.proactive.core.config.CentralPAPropertyRepository;
import org.objectweb.proactive.core.descriptor.data.ProActiveDescriptorInternal;
import org.objectweb.proactive.core.descriptor.data.VirtualNodeInternal;
import org.objectweb.proactive.core.descriptor.services.TechnicalService;
import org.objectweb.proactive.core.descriptor.util.RefactorPAD;
import org.objectweb.proactive.core.event.RuntimeRegistrationEvent;
import org.objectweb.proactive.core.event.RuntimeRegistrationEventProducerImpl;
import org.objectweb.proactive.core.filetransfer.FileTransferEngine;
import org.objectweb.proactive.core.gc.GarbageCollector;
import org.objectweb.proactive.core.httpserver.ClassServerServlet;
import org.objectweb.proactive.core.httpserver.HTTPServer;
import org.objectweb.proactive.core.jmx.mbean.JMXClassLoader;
import org.objectweb.proactive.core.jmx.mbean.ProActiveRuntimeWrapper;
import org.objectweb.proactive.core.jmx.mbean.ProActiveRuntimeWrapperMBean;
import org.objectweb.proactive.core.jmx.naming.FactoryName;
import org.objectweb.proactive.core.jmx.notification.GCMRuntimeRegistrationNotificationData;
import org.objectweb.proactive.core.jmx.notification.NotificationType;
import org.objectweb.proactive.core.jmx.notification.RuntimeNotificationData;
import org.objectweb.proactive.core.jmx.server.ServerConnector;
import org.objectweb.proactive.core.jmx.util.JMXNotificationManager;
import org.objectweb.proactive.core.mop.ConstructorCall;
import org.objectweb.proactive.core.mop.ConstructorCallExecutionFailedException;
import org.objectweb.proactive.core.mop.JavassistByteCodeStubBuilder;
import org.objectweb.proactive.core.mop.Utils;
import org.objectweb.proactive.core.node.Node;
import org.objectweb.proactive.core.node.NodeException;
import org.objectweb.proactive.core.node.NodeFactory;
import org.objectweb.proactive.core.node.NodeImpl;
import org.objectweb.proactive.core.process.UniversalProcess;
import org.objectweb.proactive.core.remoteobject.RemoteObjectExposer;
import org.objectweb.proactive.core.rmi.FileProcess;
import org.objectweb.proactive.core.runtime.broadcast.BroadcastDisabledException;
import org.objectweb.proactive.core.runtime.broadcast.RTBroadcaster;
import org.objectweb.proactive.core.security.PolicyServer;
import org.objectweb.proactive.core.security.ProActiveSecurity;
import org.objectweb.proactive.core.security.ProActiveSecurityManager;
import org.objectweb.proactive.core.security.SecurityConstants.EntityType;
import org.objectweb.proactive.core.security.SecurityContext;
import org.objectweb.proactive.core.security.SecurityEntity;
import org.objectweb.proactive.core.security.TypedCertificate;
import org.objectweb.proactive.core.security.crypto.KeyExchangeException;
import org.objectweb.proactive.core.security.crypto.SessionException;
import org.objectweb.proactive.core.security.domain.SecurityDomain;
import org.objectweb.proactive.core.security.exceptions.InvalidPolicyFile;
import org.objectweb.proactive.core.security.exceptions.RenegotiateSessionException;
import org.objectweb.proactive.core.security.exceptions.SecurityNotAvailableException;
import org.objectweb.proactive.core.security.securityentity.Entities;
import org.objectweb.proactive.core.security.securityentity.Entity;
import org.objectweb.proactive.core.util.ClassDataCache;
import org.objectweb.proactive.core.util.ProActiveInet;
import org.objectweb.proactive.core.util.ProActiveRandom;
import org.objectweb.proactive.core.util.URIBuilder;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
/**
* <p>
* Implementation of ProActiveRuntime
* </p>
*
* @author The ProActive Team
* @version 1.0, 2001/10/23
* @since ProActive 0.91
*
*/
public class ProActiveRuntimeImpl extends RuntimeRegistrationEventProducerImpl implements ProActiveRuntime,
LocalProActiveRuntime {
//
// -- STATIC MEMBERS
// -----------------------------------------------------------
//
// the Unique instance of ProActiveRuntime
private static ProActiveRuntimeImpl proActiveRuntime;
// JMX
private static Logger jmxLogger = ProActiveLogger.getLogger(Loggers.JMX);
private static final Logger clLogger = ProActiveLogger.getLogger(Loggers.CLASSLOADING);
/**
*
* @return the proactive runtime associated to this jvm according to the
* current classloader
*/
private static synchronized ProActiveRuntimeImpl getProActiveRuntimeImpl() {
if (proActiveRuntime == null) {
try {
proActiveRuntime = new ProActiveRuntimeImpl();
proActiveRuntime.createMBean();
System.setProperty(PALifeCycle.PA_STARTED_PROP, "true");
if (CentralPAPropertyRepository.PA_RUNTIME_PING.isTrue()) {
new PARTPinger().start();
}
                RTBroadcaster rtBroadcaster;
                try {
                    rtBroadcaster = RTBroadcaster.getInstance();
                    // notify our presence on the LAN
                    rtBroadcaster.sendCreation();
                } catch (Exception e) {
                    // just ignore it; the feature is disabled
                    logger.debug("unable to activate RTBroadcast, reason is " + e.getMessage());
                    ProActiveLogger.logEatedException(logger, e);
}
} catch (Exception e) {
logger.fatal("Error while initializing ProActive Runtime", e);
throw new RuntimeException(e);
}
return proActiveRuntime;
} else {
return proActiveRuntime;
}
}
// runtime security manager
private static ProActiveSecurityManager runtimeSecurityManager;
// map of local nodes, key is node name
private Map<String, LocalNode> nodeMap;
//
// -- PRIVATE MEMBERS
// -----------------------------------------------------------
//
private VMInformationImpl vmInformation;
// map VirtualNodes and their names
private Map<String, VirtualNodeInternal> virtualNodesMap;
// map descriptor and their url
private Map<String, ProActiveDescriptorInternal> descriptorMap;
// map proActiveRuntime registered on this VM and their names
private Map<String, ProActiveRuntime> proActiveRuntimeMap;
private ProActiveRuntime parentRuntime;
protected RemoteObjectExposer<ProActiveRuntime> roe;
// JMX
/** The Server Connector to connect remotely to the JMX server */
private ServerConnector serverConnector;
private Object mutex = new Object();
/** The MBean representing this ProActive Runtime */
private ProActiveRuntimeWrapperMBean mbean;
private long gcmNodes;
//
// -- CONSTRUCTORS
// -----------------------------------------------------------
//
// singleton
protected ProActiveRuntimeImpl() throws ProActiveException {
try {
this.vmInformation = new VMInformationImpl();
this.proActiveRuntimeMap = new ConcurrentHashMap<String, ProActiveRuntime>();
this.virtualNodesMap = new ConcurrentHashMap<String, VirtualNodeInternal>();
this.descriptorMap = new ConcurrentHashMap<String, ProActiveDescriptorInternal>();
this.nodeMap = new ConcurrentHashMap<String, LocalNode>();
try {
String file = CentralPAPropertyRepository.PA_RUNTIME_SECURITY.getValue();
ProActiveSecurity.loadProvider();
if ((file != null) && new File(file).exists()) {
// loading security from a file
ProActiveRuntimeImpl.runtimeSecurityManager = new ProActiveSecurityManager(
EntityType.RUNTIME, file);
ProActiveLogger.getLogger(Loggers.SECURITY_RUNTIME).info(
"ProActive Security Policy (proactive.runtime.security) using " + file);
runtimeSecurityManager = runtimeSecurityManager.generateSiblingCertificate(
EntityType.RUNTIME, this.getVMInformation().getName());
// Is the runtime included within a Domain ?
String domainURL = CentralPAPropertyRepository.PA_RUNTIME_DOMAIN_URL.getValue();
if (domainURL != null) {
SecurityEntity domain = PAActiveObject.lookupActive(SecurityDomain.class, domainURL);
ProActiveRuntimeImpl.runtimeSecurityManager.setParent(domain);
}
} else {
ProActiveLogger
.getLogger(Loggers.SECURITY_RUNTIME)
.debug(
"ProActive Security Policy (proactive.runtime.security) not set. Runtime Security disabled ");
}
} catch (InvalidPolicyFile e) {
e.printStackTrace();
} catch (ActiveObjectCreationException e) {
e.printStackTrace();
}
// System.out.println(vmInformation.getVMID().toString());
} catch (UnknownHostException e) {
// System.out.println();
logger.fatal(" !!! Cannot do a reverse lookup on that host");
// System.out.println();
e.printStackTrace();
System.exit(1);
} catch (IOException e) {
e.printStackTrace();
}
// Remote Object exporter
this.roe = new RemoteObjectExposer<ProActiveRuntime>("ProActiveRuntime_" +
vmInformation.getHostName() + "_" + vmInformation.getVMID(),
org.objectweb.proactive.core.runtime.ProActiveRuntime.class.getName(), this,
ProActiveRuntimeRemoteObjectAdapter.class);
this.roe.createRemoteObject(vmInformation.getName(), false);
        if (CentralPAPropertyRepository.PA_CLASSLOADING_USEHTTP.isTrue()) {
            // Set the codebase when useHTTP is true and the
            // ProActiveRMIClassLoader is in use
            String codebase = ClassServerServlet.get().getCodeBase();
            CentralPAPropertyRepository.PA_CODEBASE.setValue(codebase);
        } else {
            // Publish the URL of this runtime in the ProActive codebase
            // URL must be prefixed by pa to use our custom protocol handlers
            // URL must be terminated by a / according to the RMI specification
            CentralPAPropertyRepository.PA_CODEBASE.setValue("pa" + this.getURL() + "/");
        }
// logging info
MDC.remove("runtime");
MDC.put("runtime", getURL());
}
//
// -- PUBLIC METHODS
// -----------------------------------------------------------
//
public static ProActiveRuntimeImpl getProActiveRuntime() {
return getProActiveRuntimeImpl();
}
    /**
     * If no ServerConnector has been created yet, creates and starts a new one.
     * Any ProActive JMX connector client can then connect to it remotely and
     * manage the MBeans. The started connector can later be retrieved with
     * {@link #getJMXServerConnector()}.
     */
public void startJMXServerConnector() {
synchronized (mutex) {
if (serverConnector == null) {
createServerConnector();
}
}
}
/**
* @inheritDoc
*/
@Override
public ProActiveRuntimeWrapperMBean getMBean() {
return mbean;
}
/**
* @inheritDoc
*/
@Override
public String getMBeanServerName() {
return URIBuilder.getNameFromURI(getProActiveRuntimeImpl().getURL());
}
/**
* @inheritDoc
*/
@Override
public ServerConnector getJMXServerConnector() {
return serverConnector;
}
//
// -- Implements LocalProActiveRuntime
// -----------------------------------------------
//
/**
* @inheritDoc
*/
@Override
public void registerLocalVirtualNode(VirtualNodeInternal vn, String vnName) {
// System.out.println("vn "+vnName+" registered");
this.virtualNodesMap.put(vnName, vn);
}
/**
* @inheritDoc
*/
@Override
public void setParent(ProActiveRuntime parentPARuntime) {
if (this.parentRuntime == null) {
this.parentRuntime = parentPARuntime;
} else {
runtimeLogger.error("Parent runtime already set!");
}
}
public void registerDescriptor(String url, ProActiveDescriptorInternal pad) {
this.descriptorMap.put(url, pad);
}
/**
* @inheritDoc
*/
@Override
public ProActiveDescriptorInternal getDescriptor(String url, boolean isHierarchicalSearch)
throws IOException, ProActiveException {
ProActiveDescriptorInternal pad = this.descriptorMap.get(url);
// hierarchical search or not, look if we know the pad
if (pad != null) {
// if pad found and hierarchy search return pad with no main
if (isHierarchicalSearch) {
return RefactorPAD.buildNoMainPAD(pad);
} else {
// if not hierarchy search, return the normal pad
return pad;
}
} else if (!isHierarchicalSearch) {
return null; // pad == null
} else { // else search pad in parent runtime
if (this.parentRuntime == null) {
throw new IOException(
"Descriptor cannot be found hierarchically since this runtime has no parent");
}
return this.parentRuntime.getDescriptor(url, true);
}
}
public void removeDescriptor(String url) {
this.descriptorMap.remove(url);
}
/**
* Creates a Server Connector
*/
private void createServerConnector() {
        // Once the ServerConnector is launched, any ProActive JMX connector
        // client can connect to it remotely and manage the MBeans.
serverConnector = new ServerConnector(URIBuilder.getNameFromURI(getProActiveRuntimeImpl().getURL()));
try {
serverConnector.start();
} catch (IOException e) {
jmxLogger.error("Can't start the JMX Connector in the ProActive Runtime", e);
}
}
/**
* Creates the MBean associated to the ProActiveRuntime
*/
protected void createMBean() {
// JMX registration
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
JMXClassLoader jmxClassLoader = new JMXClassLoader(classLoader);
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName objectName = null;
try {
objectName = new ObjectName("org.objectweb.proactive:type=JMXClassLoader");
} catch (MalformedObjectNameException e) {
jmxLogger.error("Can't create the objectName of the JMX ClassLoader MBean", e);
} catch (NullPointerException e) {
jmxLogger.error("Can't create the objectName of the JMX ClassLoader MBean", e);
}
try {
mbs.registerMBean(jmxClassLoader, objectName);
} catch (InstanceAlreadyExistsException e) {
jmxLogger.debug("A MBean with the object name " + objectName + " already exists", e);
} catch (MBeanRegistrationException e) {
jmxLogger.error("Can't register the MBean of the JMX ClassLoader", e);
} catch (NotCompliantMBeanException e) {
jmxLogger.error("The MBean of the JMX ClassLoader is not JMX compliant", e);
}
String runtimeUrl = getProActiveRuntimeImpl().getURL();
objectName = FactoryName.createRuntimeObjectName(runtimeUrl);
if (!mbs.isRegistered(objectName)) {
mbean = new ProActiveRuntimeWrapper(getProActiveRuntimeImpl());
try {
mbs.registerMBean(mbean, objectName);
} catch (InstanceAlreadyExistsException e) {
jmxLogger.error("A MBean with the object name " + objectName + " already exists", e);
} catch (MBeanRegistrationException e) {
jmxLogger.error("Can't register the MBean of the ProActive Runtime", e);
} catch (NotCompliantMBeanException e) {
jmxLogger.error("The MBean of the ProActive Runtime is not JMX compliant", e);
}
}
}
//
// -- Implements ProActiveRuntime
// -----------------------------------------------
//
/**
* @inheritDoc
*/
@Override
public Node createLocalNode(String nodeName, boolean replacePreviousBinding,
ProActiveSecurityManager nodeSecurityManager, String vnName) throws NodeException,
AlreadyBoundException {
if (!replacePreviousBinding && (this.nodeMap.get(nodeName) != null)) {
throw new AlreadyBoundException("Node " + nodeName +
" already created on this ProActiveRuntime. To overwrite this node, use true for replacePreviousBinding");
}
if (nodeSecurityManager != null) {
// setting the current runtime as parent entity of the node
nodeSecurityManager.setParent(this);
}
try {
LocalNode localNode = new LocalNode(nodeName, nodeSecurityManager, vnName, replacePreviousBinding);
if (replacePreviousBinding && (this.nodeMap.get(nodeName) != null)) {
localNode.setActiveObjects(this.nodeMap.get(nodeName).getActiveObjectsId());
this.nodeMap.remove(nodeName);
}
this.nodeMap.put(nodeName, localNode);
Node node = null;
try {
node = new NodeImpl((ProActiveRuntime) PARemoteObject.lookup(URI.create(localNode.getURL())),
localNode.getURL());
} catch (ProActiveException e) {
throw new NodeException("Failed to created NodeImpl", e);
}
return node;
} catch (ProActiveException e) {
throw new NodeException("Failed to create the LocalNode for " + nodeName, e);
}
}
/**
* @inheritDoc
*/
@Override
public Node createGCMNode(ProActiveSecurityManager nodeSecurityManager, String vnName,
List<TechnicalService> tsList) throws NodeException, AlreadyBoundException {
if (gcmNodes >= vmInformation.capacity) {
logger.warn("Runtime capacity exceeded. A bug inside GCM Deployment occured");
}
String nodeName = this.vmInformation.getName() + "_" + Constants.GCM_NODE_NAME + gcmNodes;
Node node = null;
try {
node = createLocalNode(nodeName, false, nodeSecurityManager, vnName);
for (TechnicalService ts : tsList) {
ts.apply(node);
}
} catch (NodeException e) {
            // Nothing can be done here; this node will not be created
logger.warn("Failed to create a capacity node", e);
} catch (AlreadyBoundException e) {
            // CapacityNode is a reserved namespace.
            // Should not happen; log it and replace the old node
            logger.warn(nodeName + " is already registered... replacing it!");
try {
createLocalNode(nodeName, true, null, vnName);
} catch (NodeException e1) {
logger.warn("Failed to create a capacity node", e1);
} catch (AlreadyBoundException e1) {
// Cannot be thrown since replacePreviousBinding = true
logger.warn("Impossible exception ! Check Me !", e1);
}
}
gcmNodes++;
return node;
}
/**
* @inheritDoc
*/
@Override
public void killAllNodes() {
for (Map.Entry<String, LocalNode> e : this.nodeMap.entrySet()) {
String nodeName = e.getKey();
killNode(nodeName);
}
}
/**
* @inheritDoc
*/
@Override
public void killNode(String nodeName) {
LocalNode localNode = this.nodeMap.get(nodeName);
if (localNode != null) {
localNode.terminate();
}
this.nodeMap.remove(nodeName);
}
/**
* @inheritDoc
*/
@Override
public void createVM(UniversalProcess remoteProcess) throws java.io.IOException {
remoteProcess.startProcess();
}
/**
* @inheritDoc
*/
@Override
public String[] getLocalNodeNames() {
int i = 0;
String[] nodeNames;
synchronized (this.nodeMap) {
nodeNames = new String[this.nodeMap.size()];
for (Map.Entry<String, LocalNode> e : this.nodeMap.entrySet()) {
nodeNames[i] = e.getKey();
i++;
}
}
return nodeNames;
}
/**
* Returns a snapshot of all the local nodes
*
* The collection is a copy and is never updated by the ProActive Runtime.
*
* @return all the local nodes
*/
public Collection<LocalNode> getLocalNodes() {
        return new ArrayList<LocalNode>(this.nodeMap.values());
}
/**
* @inheritDoc
*/
@Override
public VMInformation getVMInformation() {
return this.vmInformation;
}
/**
* @inheritDoc
*/
@Override
public void register(ProActiveRuntime proActiveRuntimeDist, String proActiveRuntimeName,
String creatorID, String creationProtocol, String vmName) {
// System.out.println("register in Impl");
// System.out.println("thread"+Thread.currentThread().getName());
// System.out.println(vmInformation.getVMID().toString());
this.proActiveRuntimeMap.put(proActiveRuntimeName, proActiveRuntimeDist);
// ProActiveEvent
notifyListeners(this, RuntimeRegistrationEvent.RUNTIME_REGISTERED, proActiveRuntimeDist, creatorID,
creationProtocol, vmName);
// END ProActiveEvent
// JMX Notification
if (getMBean() != null) {
RuntimeNotificationData notificationData = new RuntimeNotificationData(creatorID,
proActiveRuntimeDist.getURL(), creationProtocol, vmName);
getMBean().sendNotification(NotificationType.runtimeRegistered, notificationData);
}
// END JMX Notification
}
/**
* @inheritDoc
*/
@Override
public void unregister(ProActiveRuntime proActiveRuntimeDist, String proActiveRuntimeUrl,
String creatorID, String creationProtocol, String vmName) {
this.proActiveRuntimeMap.remove(proActiveRuntimeUrl);
// ProActiveEvent
notifyListeners(this, RuntimeRegistrationEvent.RUNTIME_UNREGISTERED, proActiveRuntimeDist, creatorID,
creationProtocol, vmName);
// END ProActiveEvent
// JMX Notification
if (getMBean() != null) {
RuntimeNotificationData notificationData = new RuntimeNotificationData(creatorID,
proActiveRuntimeDist.getURL(), creationProtocol, vmName);
getMBean().sendNotification(NotificationType.runtimeUnregistered, notificationData);
}
// END JMX Notification
}
/**
* @inheritDoc
*/
@Override
public ProActiveRuntime[] getProActiveRuntimes() {
if (this.proActiveRuntimeMap != null) {
return this.proActiveRuntimeMap.values().toArray(new ProActiveRuntime[] {});
} else {
return null;
}
}
/**
* @inheritDoc
*/
@Override
public ProActiveRuntime getProActiveRuntime(String proActiveRuntimeName) {
return this.proActiveRuntimeMap.get(proActiveRuntimeName);
}
/**
* @inheritDoc
*/
@Override
public synchronized void killRT(boolean softly) {
cleanJvmFromPA();
        // JMX unregistration and node termination are handled by cleanJvmFromPA()
System.exit(0);
}
public synchronized void cleanJvmFromPA() {
// JMX Notification
if (getMBean() != null) {
getMBean().sendNotification(NotificationType.runtimeDestroyed);
}
// END JMX Notification
// terminates the nodes and their active objects
killAllNodes();
logger.info("terminating Runtime " + vmInformation.getName());
// JMX unregistration
if (getMBean() != null) {
MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
ObjectName objectName = getMBean().getObjectName();
if (mbs.isRegistered(objectName)) {
try {
mbs.unregisterMBean(objectName);
} catch (InstanceNotFoundException e) {
jmxLogger.error("The MBean with the objectName " + objectName + " was not found", e);
} catch (MBeanRegistrationException e) {
jmxLogger.error("The MBean with the objectName " + objectName +
" can't be unregistered from the MBean server", e);
}
}
mbean = null;
}
        // terminate the broadcast thread if it exists
RTBroadcaster broadcaster;
try {
broadcaster = RTBroadcaster.getInstance();
broadcaster.kill();
} catch (BroadcastDisabledException e1) {
// just display the message
logger.debug(e1.getMessage());
}
Iterator<UniversalBody> bodies = LocalBodyStore.getInstance().getLocalBodies().bodiesIterator();
UniversalBody body;
while (bodies.hasNext()) {
try {
body = bodies.next();
((Body) body).terminate();
} catch (Throwable e) {
e.printStackTrace();
}
}
Iterator<UniversalBody> halfBodies = LocalBodyStore.getInstance().getLocalHalfBodies()
.bodiesIterator();
UniversalBody halfBody;
while (halfBodies.hasNext()) {
try {
halfBody = halfBodies.next();
((Body) halfBody).terminate();
} catch (Throwable e) {
e.printStackTrace();
}
}
// unexport the runtime
try {
this.roe.unexportAll();
} catch (ProActiveException e) {
logger.warn("unable to unexport the runtime", e);
}
try {
HTTPServer.get().stop();
HTTPServer.get().destroy();
} catch (Exception e) {
            // failing to stop the HTTP server is not fatal during runtime cleanup
            logger.warn("Unable to stop the HTTP server", e);
}
this.roe = null;
proActiveRuntime = null;
}
/**
* @inheritDoc
*/
@Override
public String getURL() {
return this.roe.getURL();
}
/**
* @inheritDoc
*/
@Override
public List<UniversalBody> getActiveObjects(String nodeName) {
// the array to return
List<UniversalBody> localBodies = new ArrayList<UniversalBody>();
LocalBodyStore localBodystore = LocalBodyStore.getInstance();
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
if (bodyList == null) {
// Probably the node is killed
return localBodies;
}
synchronized (bodyList) {
for (int i = 0; i < bodyList.size(); i++) {
UniqueID bodyID = bodyList.get(i);
// check if the body is still on this vm
Body body = localBodystore.getLocalBody(bodyID);
if (body == null) {
// runtimeLogger.warn("body null");
                    // the body with the given ID is no longer on this
                    // ProActiveRuntime
// unregister it from this ProActiveRuntime
unregisterBody(nodeName, bodyID);
} else {
// the body is on this runtime then return adapter and class
// name of the reified
// object to enable the construction of stub-proxy couple.
localBodies.add(0, body.getRemoteAdapter());
}
}
return localBodies;
}
}
/**
* @inheritDoc
*/
@Override
public VirtualNodeInternal getVirtualNode(String virtualNodeName) {
// System.out.println("i am in get vn ");
return this.virtualNodesMap.get(virtualNodeName);
}
/**
* @inheritDoc
*/
@Override
public void registerVirtualNode(String virtualNodeName, boolean replacePreviousBinding)
throws ProActiveException {
this.roe.createRemoteObject(virtualNodeName, false);
}
/**
* @inheritDoc
*/
@Override
public void unregisterVirtualNode(String virtualNodeName) {
VirtualNodeInternal vn = virtualNodesMap.get(virtualNodeName);
if (vn != null) {
JMXNotificationManager.getInstance().unsubscribe(getMBean().getObjectName(), vn);
}
virtualNodesMap.remove(virtualNodeName);
}
/**
* @inheritDoc
*/
@Override
public void unregisterAllVirtualNodes() {
this.virtualNodesMap.clear();
}
/**
* @inheritDoc
*/
@Override
public List<UniversalBody> getActiveObjects(String nodeName, String className) {
// the array to return
ArrayList<UniversalBody> localBodies = new ArrayList<UniversalBody>();
LocalBodyStore localBodystore = LocalBodyStore.getInstance();
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
if (bodyList == null) {
// Probably the node is killed
return localBodies;
}
synchronized (bodyList) {
for (int i = 0; i < bodyList.size(); i++) {
UniqueID bodyID = bodyList.get(i);
// check if the body is still on this vm
Body body = localBodystore.getLocalBody(bodyID);
if (body == null) {
// runtimeLogger.warn("body null");
                    // the body with the given ID is no longer on this
                    // ProActiveRuntime
// unregister it from this ProActiveRuntime
unregisterBody(nodeName, bodyID);
} else {
String objectClass = body.getReifiedObject().getClass().getName();
// if the reified object is of the specified type
// return the body adapter
if (objectClass.equals(className)) {
localBodies.add(body.getRemoteAdapter());
}
}
}
return localBodies;
}
}
/**
* @inheritDoc
*/
@Override
public UniversalBody createBody(String nodeName, ConstructorCall bodyConstructorCall, boolean isLocal)
throws ConstructorCallExecutionFailedException, java.lang.reflect.InvocationTargetException,
ActiveObjectCreationException {
if (NodeFactory.isHalfBodiesNode(nodeName)) {
throw new ActiveObjectCreationException(
"Cannot create an active object on the reserved halfbodies node.");
}
Body localBody = (Body) bodyConstructorCall.execute();
// SECURITY
ProActiveSecurityManager objectSecurityManager = ((AbstractBody) localBody)
.getProActiveSecurityManager();
if (objectSecurityManager != null) {
objectSecurityManager.setParent(this.nodeMap.get(nodeName));
}
ProActiveLogger.getLogger(Loggers.RUNTIME).debug("nodeName " + nodeName);
registerBody(nodeName, localBody);
if (GarbageCollector.dgcIsEnabled()) {
((AbstractBody) localBody).updateReferences(UniversalBodyProxy.getIncomingReferences());
}
if (isLocal) {
// if the body and proxy are on the same vm, returns the local view
// System.out.println("body and proxy on the same vm");
// System.out.println(localBody.getReifiedObject().getClass().getName());
// register the body in the nodemap
return localBody;
} else {
// otherwise return the adapter
// System.out.println ("RemoteProActiveImpl.createBody
// "+vmInformation.getInetAddress().getHostName() +" -> new
// "+bodyConstructorCall.getTargetClassName()+" on node "+nodeName);
// System.out.println ("RemoteProActiveRuntimeImpl.localBody created
// localBody="+localBody+" on node "+nodeName);
return localBody.getRemoteAdapter();
}
}
/**
* @inheritDoc
*/
@Override
public UniversalBody receiveBody(String nodeName, Body body) throws MigrationException {
ProActiveSecurityManager psm = ((AbstractBody) body).getProActiveSecurityManager();
if (psm != null) {
psm.setParent(this.nodeMap.get(nodeName));
}
if (NodeFactory.isHalfBodiesNode(nodeName)) {
throw new MigrationException("Cannot migrate an active object on the reserved halfbodies node.");
}
registerBody(nodeName, body);
// register futures that have been deserialized in the body
((AbstractBody) body).registerIncomingFutures();
return body.getRemoteAdapter();
}
/**
* @inheritDoc
*/
@Override
public UniversalBody receiveCheckpoint(String nodeURL, Checkpoint ckpt, int inc)
throws ProActiveException {
runtimeLogger.debug("Receive a checkpoint for recovery");
if (NodeFactory.isHalfBodiesNode(nodeURL)) {
throw new ProActiveException("Cannot recover an active object on the reserved halfbodies node.");
}
// the recovered body
Body ret = ckpt.recover();
// update node url
ret.updateNodeURL(nodeURL);
String nodeName = URIBuilder.getNameFromURI(nodeURL);
// push the initial context for the current thread.
// need to register as thread of the corresponding active object: this
// thread
        // may send logged requests or logged replies
LocalBodyStore.getInstance().pushContext(new Context(ret, null));
try {
((AbstractBody) ret).getFTManager().beforeRestartAfterRecovery(ckpt.getCheckpointInfo(), inc);
} finally {
// remove contexts for the current thread
LocalBodyStore.getInstance().clearAllContexts();
}
// register the body
this.registerBody(nodeName, ret);
// register futures that have been deserialized in the body
((AbstractBody) ret).registerIncomingFutures();
        // restart activity
if (runtimeLogger.isDebugEnabled()) {
runtimeLogger.debug(ret.getID() + " is restarting activity...");
}
((ActiveBody) ret).startBody();
// no more need to return the recovered body
return null;
}
/**
* Registers the specified body in the node with the nodeName key. In fact
* it is the <code>UniqueID</code> of the body that is attached to the node.
*
     * @param nodeName
     *            The name of the node to which the body is attached in the
     *            <code>nodeMap</code>
* @param body
* The body to register
*/
private void registerBody(String nodeName, Body body) {
UniqueID bodyID = body.getID();
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
synchronized (bodyList) {
if (!bodyList.contains(bodyID)) {
// System.out.println("in registerbody id = "+
// bodyID.toString());
bodyList.add(bodyID);
}
}
}
/**
* Unregisters the specified <code>UniqueID</code> from the node
* corresponding to the nodeName key
*
     * @param nodeName
     *            The name of the node from which to remove the <code>UniqueID</code>
* @param bodyID
* The <code>UniqueID</code> to remove
*/
private void unregisterBody(String nodeName, UniqueID bodyID) {
// System.out.println("in remove id= "+ bodyID.toString());
// System.out.println("array size
// "+((ArrayList)hostsMap.get(nodeName)).size());
List<UniqueID> bodyList = this.nodeMap.get(nodeName).getActiveObjectsId();
synchronized (bodyList) {
bodyList.remove(bodyID);
// System.out.println("array size
// "+((ArrayList)hostsMap.get(nodeName)).size());
}
}
// SECURITY
/**
* set the runtime security manager
*/
public static void setProActiveSecurityManager(ProActiveSecurityManager server) {
if (runtimeSecurityManager != null) {
return;
}
runtimeSecurityManager = server;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#getEntities(java
* .lang.String)
*/
public Entities getEntities(String nodeName) {
ProActiveSecurityManager nodeSecurityManager = this.nodeMap.get(nodeName).getSecurityManager();
Entities entities = this.getEntities();
if (nodeSecurityManager != null) {
entities.add(new Entity(nodeSecurityManager.getMyCertificateChain()));
}
return entities;
}
/**
     * the runtime looks for a matching security entity within its nodes and
* active objects
*
* @param securityEntity
* the security entity looked for.
* @return matching entities
*/
public Entities getEntities(SecurityEntity securityEntity) {
if (true) {
throw new RuntimeException();
}
return null;
// hostsMap.
// try {
// System.out.println(" testing for securityentityID " +
// securityEntity);
// for (Enumeration e = hostsMap.keys(); e.hasMoreElements();) {
// String node = (String) e.nextElement();
//
// System.out.println("testing for node " + node);
// ArrayList listAO = (ArrayList) hostsMap.get(node);
//
// for (int i = 0; i < listAO.size(); i++) {
// UniqueID localBodyID = (UniqueID) listAO.get(i);
// System.out.println(" testing against localBbodyID " +
// localBodyID);
//
// if (securityEntity.getCertificate().equals(localBodyID)) {
// ArrayList a = new ArrayList();
//
// ProActiveSecurityManager nodeSecurityManager =
// (ProActiveSecurityManager) nodeSecurityManagerMap.get(node);
// PolicyServer nodePolicyServer =
// nodeSecurityManager.getPolicyServer();
//
// if (nodePolicyServer != null) {
// EntityVirtualNode entityVirtualNode = new
// EntityVirtualNode(nodeSecurityManager.getVNName(),
// nodePolicyServer.getApplicationCertificate(),
// nodeSecurityManager.getCertificate());
// a.add(entityVirtualNode);
// return a;
// }
// }
//
// }
// }
// } catch (SecurityNotAvailableException e1) {
// e1.printStackTrace();
// } catch (IOException e1) {
// e1.printStackTrace();
// }
// return new ArrayList();
}
/*
* (non-Javadoc)
*
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#getEntities()
*/
public Entities getEntities() {
Entities entities = new Entities();
if (runtimeSecurityManager != null) {
entities.add(new Entity(runtimeSecurityManager.getMyCertificateChain()));
return entities;
}
return null;
}
/**
* @param local
* @param distant
*/
public SecurityContext getPolicy(Entities local, Entities distant) throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
// PolicyServer policyServer = runtimeSecurityManager.getPolicyServer();
return runtimeSecurityManager.getPolicy(local, distant);
}
public synchronized byte[] getClassData(String className) {
byte[] classData = null;
// Check class data cache (already generated stub)
classData = ClassDataCache.instance().getClassData(className);
if (classData != null) {
return classData;
} else {
if (clLogger.isTraceEnabled()) {
clLogger.trace(className + " is not in the class data cache");
}
}
// Look in classpath
try {
classData = FileProcess.getBytesFromResource(className);
if (classData != null) {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Found " + className + " in the classpath");
}
return classData;
} else {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Failed to find " + className + " in classpath");
}
}
} catch (IOException e2) {
Logger l = ProActiveLogger.getLogger(Loggers.CLASSLOADING);
ProActiveLogger.logEatedException(l, e2);
}
// Generate stub
classData = generateStub(className);
if (classData != null) {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Generated " + className + " stub");
}
return classData;
} else {
if (clLogger.isTraceEnabled()) {
clLogger.trace("Failed to generate stub for " + className);
}
}
return null;
}
public void launchMain(String className, String[] parameters) throws ClassNotFoundException,
NoSuchMethodException, ProActiveException {
System.out.println("ProActiveRuntimeImpl.launchMain() -" + className + "-");
Class<?> mainClass = Class.forName(className);
Method mainMethod = mainClass.getMethod("main", new Class[] { String[].class });
new LauncherThread(mainMethod, parameters).start();
}
public void newRemote(String className) throws ClassNotFoundException, ProActiveException {
Class<?> remoteClass = Class.forName(className);
new LauncherThread(remoteClass).start();
}
// tries to generate a stub without using MOP methods
private byte[] generateStub(String className) {
byte[] classData = null;
if (Utils.isStubClassName(className)) {
// do not use directly MOP methods (avoid classloader cycles)
String classname = Utils.convertStubClassNameToClassName(className);
classData = JavassistByteCodeStubBuilder.create(classname, null);
if (classData != null) {
ClassDataCache.instance().addClassData(className, classData);
return classData;
}
}
// try to get the class as a generated component interface reference
classData = org.objectweb.proactive.core.component.gen.Utils.getClassData(className);
if (classData != null) {
ClassDataCache.instance().addClassData(className, classData);
return classData;
}
return null;
}
/**
* @inheritDoc
*/
@Override
public void terminateSession(long sessionID) throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
runtimeSecurityManager.terminateSession(sessionID);
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#getCertificate()
*/
public TypedCertificate getCertificate() throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.getCertificate();
}
/*
* (non-Javadoc)
*
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#
* getProActiveSecurityManager()
*/
public ProActiveSecurityManager getProActiveSecurityManager() {
return runtimeSecurityManager;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#startNewSession
* (org.objectweb.proactive.ext.security.Communication)
*/
public long startNewSession(long distantSessionID, SecurityContext policy,
TypedCertificate distantCertificate) throws SecurityNotAvailableException, SessionException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.startNewSession(distantSessionID, policy, distantCertificate);
}
/*
* (non-Javadoc)
*
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#getPublicKey()
*/
public PublicKey getPublicKey() throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.getPublicKey();
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#randomValue(long,
* byte[])
*/
public byte[] randomValue(long sessionID, byte[] clientRandomValue) throws SecurityNotAvailableException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
try {
return runtimeSecurityManager.randomValue(sessionID, clientRandomValue);
} catch (Exception e) {
e.printStackTrace();
}
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#publicKeyExchange
* (long, org.objectweb.proactive.core.body.UniversalBody, byte[], byte[],
* byte[])
*/
public byte[] publicKeyExchange(long sessionID, byte[] signature) throws SecurityNotAvailableException,
RenegotiateSessionException {
if (runtimeSecurityManager != null) {
try {
return runtimeSecurityManager.publicKeyExchange(sessionID, signature);
} catch (KeyExchangeException e) {
e.printStackTrace();
}
} else {
throw new SecurityNotAvailableException();
}
return null;
}
/*
* (non-Javadoc)
*
* @see
* org.objectweb.proactive.core.runtime.ProActiveRuntime#secretKeyExchange
* (long, byte[], byte[], byte[], byte[], byte[])
*/
public byte[][] secretKeyExchange(long sessionID, byte[] encodedAESKey, byte[] encodedIVParameters,
byte[] encodedClientMacKey, byte[] encodedLockData, byte[] parametersSignature)
throws SecurityNotAvailableException, RenegotiateSessionException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.secretKeyExchange(sessionID, encodedAESKey, encodedIVParameters,
encodedClientMacKey, encodedLockData, parametersSignature);
}
// /*
// * (non-Javadoc)
// *
// * @see
// org.objectweb.proactive.core.runtime.ProActiveRuntime#getCertificateEncoded()
// */
// public byte[] getCertificateEncoded() throws
// SecurityNotAvailableException {
// if (runtimeSecurityManager == null) {
// throw new SecurityNotAvailableException();
// }
// return runtimeSecurityManager.getCertificateEncoded();
// }
public String getVNName(String nodename) throws ProActiveException {
return this.nodeMap.get(nodename).getVirtualNodeName();
}
//
// -- INNER CLASSES -----------------------------------------------
//
protected static class VMInformationImpl implements VMInformation, java.io.Serializable {
private final java.net.InetAddress hostInetAddress;
// the Unique ID of the JVM
private final java.rmi.dgc.VMID uniqueVMID;
private String name;
private long capacity;
private final String hostName;
private long deploymentId;
private long topologyId;
private String vmName;
public VMInformationImpl() throws java.net.UnknownHostException {
this.uniqueVMID = UniqueID.getCurrentVMID();
this.hostInetAddress = ProActiveInet.getInstance().getInetAddress();
this.hostName = URIBuilder.getHostNameorIP(this.hostInetAddress);
String random = Integer.toString(ProActiveRandom.nextPosInt());
if (CentralPAPropertyRepository.PA_RUNTIME_NAME.isSet()) {
this.name = CentralPAPropertyRepository.PA_RUNTIME_NAME.getValue();
} else {
this.name = "PA_JVM" + random; // + "_" + this.hostName;
}
this.capacity = -1;
this.deploymentId = -1;
this.topologyId = -1;
this.vmName = null;
}
//
// -- PUBLIC METHODS -----------------------------------------------
//
//
// -- implements VMInformation
// -----------------------------------------------
//
public java.rmi.dgc.VMID getVMID() {
return this.uniqueVMID;
}
public String getName() {
return this.name;
}
public java.net.InetAddress getInetAddress() {
return this.hostInetAddress;
}
/**
* @see org.objectweb.proactive.core.runtime.VMInformation#getHostName()
*/
public String getHostName() {
return this.hostName;
}
/**
* @see org.objectweb.proactive.core.runtime.VMInformation#getDescriptorVMName()
*/
public String getDescriptorVMName() {
return this.vmName;
}
public long getCapacity() {
return capacity;
}
private void setCapacity(long capacity) {
this.capacity = capacity;
}
public long getTopologyId() {
return topologyId;
}
private void setTopologyId(long topologyId) {
this.topologyId = topologyId;
}
public long getDeploymentId() {
return deploymentId;
}
private void setDeploymentId(long deploymentId) {
this.deploymentId = deploymentId;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result) + ((uniqueVMID == null) ? 0 : uniqueVMID.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final VMInformationImpl other = (VMInformationImpl) obj;
if (uniqueVMID == null) {
if (other.uniqueVMID != null) {
return false;
}
} else if (!uniqueVMID.equals(other.uniqueVMID)) {
return false;
}
return true;
}
}
//
// ----------------- INNER CLASSES --------------------------------
//
/**
* inner class for method invocation
*/
private class LauncherThread extends Thread {
private final boolean launchMain;
private Method mainMethod;
private Class<?> remoteClass;
private String[] parameters;
public LauncherThread(Class<?> remoteClass) {
this.remoteClass = remoteClass;
this.launchMain = false;
}
public LauncherThread(Method mainMethod, String[] parameters) {
this.mainMethod = mainMethod;
this.parameters = parameters;
this.launchMain = true;
}
@Override
public void run() {
if (this.launchMain) {
try {
this.mainMethod.invoke(null, new Object[] { this.parameters });
} catch (InvocationTargetException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
} else {
try {
this.remoteClass.newInstance();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InstantiationException e) {
e.printStackTrace();
}
}
}
}
/**
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#setLocalNodeProperty(java.lang.String,
* java.lang.String, java.lang.String)
*/
public Object setLocalNodeProperty(String nodeName, String key, String value) {
return this.nodeMap.get(nodeName).setProperty(key, value);
}
/**
* @see org.objectweb.proactive.core.runtime.ProActiveRuntime#getLocalNodeProperty(java.lang.String,
* java.lang.String)
*/
public String getLocalNodeProperty(String nodeName, String key) {
return this.nodeMap.get(nodeName).getProperty(key);
}
public RemoteObjectExposer<ProActiveRuntime> getRemoteObjectExposer() {
return this.roe;
}
public String[] getURLs() {
return this.roe.getURLs();
}
public ProActiveSecurityManager getProActiveSecurityManager(Entity user)
throws SecurityNotAvailableException, AccessControlException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
return runtimeSecurityManager.getProActiveSecurityManager(user);
}
public void setProActiveSecurityManager(Entity user, PolicyServer policyServer)
throws SecurityNotAvailableException, AccessControlException {
if (runtimeSecurityManager == null) {
throw new SecurityNotAvailableException();
}
runtimeSecurityManager.setProActiveSecurityManager(user, policyServer);
}
public void setCapacity(long capacity) {
if (vmInformation.getCapacity() > 0) {
throw new IllegalStateException("setCapacity already set to " + vmInformation.getCapacity());
}
if (capacity < 1) {
            throw new IllegalArgumentException(capacity +
                " is not a valid parameter for setCapacity. Must be a strictly positive long");
}
logger.debug("Capacity set to " + capacity + ". Creating the nodes...");
vmInformation.setCapacity(capacity);
}
public void register(GCMRuntimeRegistrationNotificationData notification) {
// createRegistrationForwarder();
getMBean().sendNotification(NotificationType.GCMRuntimeRegistered, notification);
}
public FileTransferEngine getFileTransferEngine() {
return FileTransferEngine.getFileTransferEngine();
}
public void addDeployment(long deploymentId) {
}
public void setDeploymentId(long deploymentId) {
vmInformation.setDeploymentId(deploymentId);
}
public void setTopologyId(long toplogyId) {
vmInformation.setTopologyId(toplogyId);
}
public void setVMName(String vmName) {
vmInformation.vmName = vmName;
}
/**
* Returns the path to the proactive home
*
* This method is quite expensive if
* {@link CentralPAPropertyRepository#PA_HOME} is not set. If called often,
* the value returned by this method should be set as value of PA_HOME. This
* method has no side effect.
*
*
*
* @since ProActive 5.0.0
*
* @return The value of {@link CentralPAPropertyRepository#PA_HOME} if it is
* set. Otherwise the path is computed according to the class or jar
* location.
*
* @throws ProActiveException
* If the path of the ProActive home cannot be computed or if
* the home is remote (only file and jar protocols are
* supported)
*/
public String getProActiveHome() throws ProActiveException {
if (CentralPAPropertyRepository.PA_HOME.isSet()) {
return CentralPAPropertyRepository.PA_HOME.getValue();
} else {
// Guess the location by using the classloader
final URL url = this.getClass().getResource(this.getClass().getSimpleName() + ".class");
final String path = url.getPath();
if ("jar".equals(url.getProtocol())) {
int begin = path.indexOf("file:");
int end = path.indexOf(".jar!");
if (begin != 0 || end < 0) {
throw new ProActiveException("Unable to find ProActive home. Bad jar url: " + url);
}
end = path.indexOf("dist/lib/ProActive.jar!");
if (end < 0) {
throw new ProActiveException("Unable to find ProActive home. Unexpected jar name: " + url);
}
try {
File padir = new File(new URI(path.substring(begin, end)));
return padir.getAbsolutePath();
} catch (URISyntaxException e) {
throw new ProActiveException(e);
}
} else if ("file".equals(url.getProtocol())) {
int index = path.indexOf("classes/Core/" + this.getClass().getName().replace('.', '/') +
".class");
if (index > 0) {
try {
return new File(new URI("file:" + path.substring(0, index))).getAbsolutePath();
} catch (URISyntaxException e) {
throw new ProActiveException(e);
}
} else {
throw new ProActiveException(
"Unable to find ProActive home. Running from class files but non standard repository layout");
}
} else {
throw new ProActiveException("Unable to find ProActive home. Unspported protocol: " + url);
}
}
}
}
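
// Usage note (illustrative addition, not part of the original file): getProActiveHome() above can be
// expensive when CentralPAPropertyRepository.PA_HOME is not set, since it inspects the class or jar
// URL on each call. A caller that needs the value repeatedly can cache it back into PA_HOME, assuming
// PA_HOME exposes the same setValue(String) used for PA_CODEBASE earlier in this file (getProActiveHome()
// declares ProActiveException, so the call would sit inside a try/catch):
//
//     String home = ProActiveRuntimeImpl.getProActiveRuntimeImpl().getProActiveHome();
//     if (!CentralPAPropertyRepository.PA_HOME.isSet()) {
//         CentralPAPropertyRepository.PA_HOME.setValue(home);
//     }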
|
PROACTIVE-1271 - Wrong path in ProActiveRuntimeImpl.getProActiveHome() on Windows
added getCanonicalPath to simplify the path returned by getProActiveHome()
Change-Id: Iaee787e85577c7e390d5d70c38fbd1a7bac7c26c
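
Illustrative sketch (not part of the actual patch; the helper class and its name are made up):
java.io.File.getCanonicalPath() resolves "." and ".." segments and normalizes separators, which is
what "simplify the path" refers to above.

    import java.io.File;
    import java.io.IOException;

    // Hypothetical helper showing the effect of canonicalization on a computed home path.
    final class PathCanonicalizer {
        static String canonicalize(String rawPath) {
            try {
                // e.g. "C:\pa\dist\lib\.." becomes "C:\pa\dist" on Windows
                return new File(rawPath).getCanonicalPath();
            } catch (IOException e) {
                return rawPath; // fall back to the uncanonicalized value
            }
        }
    }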
|
src/Core/org/objectweb/proactive/core/runtime/ProActiveRuntimeImpl.java
|
PROACTIVE-1271 - Wrong path in ProActiveRuntimeImpl.getProActiveHome() on Windows added getCanonicalPath to simplify the path returned by getProActiveHome()
|
|
Java
|
lgpl-2.1
|
be7a31a5f3848c74de9ce06806db7d8b43bae35e
| 0
|
joval/jOVAL
|
// Copyright (C) 2012 jOVAL.org. All rights reserved.
// This software is licensed under the AGPL 3.0 license available at http://www.joval.org/agpl_v3.txt
package org.joval.xml;
import java.io.StringWriter;
import javax.xml.bind.JAXBException;
import javax.xml.bind.JAXBIntrospector;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.joval.intf.xml.ITransformable;
import org.joval.util.JOVALMsg;
/**
* Utility for working with XML DOM.
*
* @author David A. Solin
* @version %I% %G%
*/
public class DOMTools {
/**
* Transform a JAXB object into a W3C Node.
*/
public static Element toElement(ITransformable source) throws Exception {
TransformerFactory xf = XSLTools.XSLVersion.V1.getFactory();
Transformer transformer = xf.newTransformer();
DOMResult result = new DOMResult();
//
// There's some bug in the Java transformer that makes it unsafe when thrashed statically by multiple
// threads (even though that should work just fine) -- a ConcurrentModificationException is generated
// internally, leading to a TransformerException.
//
// So, if this happens, we just retry after waiting a millisecond. But if it keeps happening after a
// thousand attempts, we give up.
//
TransformerException te = null;
for(int i=0; i < 1000; i++) {
try {
transformer.transform(source.getSource(), result);
return ((Document)result.getNode()).getDocumentElement();
} catch (TransformerException e) {
te = e;
if (e.getCause() instanceof java.util.ConcurrentModificationException) {
try {
Thread.sleep(1);
} catch (InterruptedException ie) {
}
} else {
throw te;
}
}
}
throw te;
}
/**
* Get the XML namespace of the specified ITransformable's root node.
*/
public static String getNamespace(ITransformable source) {
try {
JAXBIntrospector ji = source.getJAXBContext().createJAXBIntrospector();
return ji.getElementName(source.getRootObject()).getNamespaceURI();
} catch (JAXBException e) {
return null;
}
}
/**
* Convert the specified XML node into a pretty String (very useful for debugging).
*/
public static String toString(Node node) throws Exception {
TransformerFactory xf = XSLTools.XSLVersion.V1.getFactory();
Transformer transformer = xf.newTransformer();
StringWriter buff = new StringWriter();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
transformer.transform(new DOMSource(node), new StreamResult(buff));
return buff.toString();
}
}
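
// Example usage of DOMTools.toString() (illustrative addition; "report.xml" is a made-up file name):
//     Document doc = javax.xml.parsers.DocumentBuilderFactory.newInstance()
//             .newDocumentBuilder().parse(new java.io.File("report.xml"));
//     System.out.println(DOMTools.toString(doc.getDocumentElement()));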
|
src/org/joval/xml/DOMTools.java
|
// Copyright (C) 2012 jOVAL.org. All rights reserved.
// This software is licensed under the AGPL 3.0 license available at http://www.joval.org/agpl_v3.txt
package org.joval.xml;
import java.io.StringWriter;
import javax.xml.bind.JAXBException;
import javax.xml.bind.JAXBIntrospector;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.joval.intf.xml.ITransformable;
import org.joval.util.JOVALMsg;
/**
* Utility for working with XML DOM.
*
* @author David A. Solin
* @version %I% %G%
*/
public class DOMTools {
/**
* Transform a JAXB object into a W3C Node.
*/
public static Element toElement(ITransformable source) throws Exception {
TransformerFactory xf = XSLTools.XSLVersion.V1.getFactory();
Transformer transformer = xf.newTransformer();
DOMResult result = new DOMResult();
transformer.transform(source.getSource(), result);
return ((Document)result.getNode()).getDocumentElement();
}
/**
* Get the XML namespace of the specified ITransformable's root node.
*/
public static String getNamespace(ITransformable source) {
try {
JAXBIntrospector ji = source.getJAXBContext().createJAXBIntrospector();
return ji.getElementName(source.getRootObject()).getNamespaceURI();
} catch (JAXBException e) {
return null;
}
}
/**
* Convert the specified XML node into a pretty String (very useful for debugging).
*/
public static String toString(Node node) throws Exception {
TransformerFactory xf = XSLTools.XSLVersion.V1.getFactory();
Transformer transformer = xf.newTransformer();
StringWriter buff = new StringWriter();
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
transformer.transform(new DOMSource(node), new StreamResult(buff));
return buff.toString();
}
}
|
Work-around JDK bug.
|
src/org/joval/xml/DOMTools.java
|
Work-around JDK bug.
|
|
Java
|
lgpl-2.1
|
547430c1b6984c8577f7823f232470ea58baae0d
| 0
|
fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui,fjalvingh/domui
|
/*
* DomUI Java User Interface library
* Copyright (c) 2010 by Frits Jalvingh, Itris B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* See the "sponsors" file for a list of supporters.
*
* The latest version of DomUI and related code, support and documentation
* can be found at http://www.domui.org/
* The contact for the project is Frits Jalvingh <jal@etc.to>.
*/
package to.etc.domui.component.tbl;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import to.etc.domui.component.buttons.SmallImgButton;
import to.etc.domui.component.misc.IIconRef;
import to.etc.domui.component.misc.Icon;
import to.etc.domui.dom.css.DisplayType;
import to.etc.domui.dom.html.Button;
import to.etc.domui.dom.html.Div;
import to.etc.domui.dom.html.IClicked;
import to.etc.domui.dom.html.Span;
import to.etc.function.IExecute;
import to.etc.domui.util.Msgs;
import to.etc.webapp.nls.BundleRef;
import to.etc.webapp.nls.IBundleCode;
import java.util.ArrayList;
import java.util.List;
/**
* Datapager using buttons and a page number list.
*
*
* @author <a href="mailto:jal@etc.to">Frits Jalvingh</a>
* Created on Jan 2, 2019
*/
final public class DataPager2 extends Div implements IDataTablePager {
private Button m_prevBtn;
private Button m_nextBtn;
private PageableTabularComponentBase< ? > m_table;
/** When set (default) this shows selection details when a table has a selectable model. */
private boolean m_showSelection = true;
private Div m_buttonContainer;
private Div m_buttonDiv = new Div();
@NonNull
private List<SmallImgButton> m_extraButtonList = new ArrayList<>();
public DataPager2(final PageableTabularComponentBase< ? > tbl) {
m_table = tbl;
tbl.addChangeListener(this);
}
@Override
public void createContent() throws Exception {
addCssClass("ui-dp2");
m_buttonContainer = new Div("ui-dp2-bc");
add(m_buttonContainer);
Div bd = m_buttonDiv = new Div("ui-dp2-buttons");
m_buttonContainer.add(bd);
m_prevBtn = appendButton(bd, Msgs.uiPagerPrev, () -> {
int cp = m_table.getCurrentPage();
if(cp <= 0)
return;
m_table.setCurrentPage(cp - 1);
});
//-- Last part
m_nextBtn = appendButton(bd, Msgs.uiPagerNext, () -> {
int cp = m_table.getCurrentPage();
int mx = m_table.getPageCount();
cp++;
if(cp >= mx)
return;
m_table.setCurrentPage(cp);
});
redraw();
}
private Button appendButton(Div bd, IBundleCode code, IExecute x) {
Button b = new Button("ui-dp2-btn");
b.setClicked(clickednode -> x.execute());
bd.add(b);
b.add(code.getString());
return b;
}
@Nullable
private ISelectableTableComponent< ? > getSelectableTable() {
if(m_table instanceof ISelectableTableComponent< ? >)
return m_table;
return null;
}
@Nullable
private ISelectionModel< ? > getSelectionModel() {
ISelectableTableComponent< ? > stm = getSelectableTable();
if(null == stm)
return null;
return stm.getSelectionModel();
}
/**
* Return T if the "show selection UI" button should be visible.
*/
private boolean isNeedSelectionButton() throws Exception {
ISelectionModel< ? > sm = getSelectionModel();
if(sm == null || !m_showSelection)
return false;
if(!sm.isMultiSelect())
return false;
ISelectableTableComponent< ? > tc = getSelectableTable();
if(null == tc)
throw new IllegalStateException("Null selectable table?");
if(tc.isMultiSelectionVisible())
return false;
return tc.getModel() != null && tc.getModel().getRows() != 0;
}
@Override
public void selectionUIChanged(@NonNull TableModelTableBase< ? > tbl) throws Exception {
redraw();
}
/*--------------------------------------------------------------*/
/* CODING: Handle changes to the table. */
/*--------------------------------------------------------------*/
private void redraw() throws Exception {
if(! isBuilt())
return;
Div bd = m_buttonDiv;
int np = m_table.getPageCount();
if(np <= 1) {
m_buttonContainer.setDisplay(DisplayType.NONE);
return;
}
m_buttonContainer.setDisplay(DisplayType.BLOCK);
int cp = m_table.getCurrentPage();
if(np == 0) {
setDisplay(DisplayType.NONE);
return;
}
setDisplay(DisplayType.BLOCK);
if(cp <= 0) {
m_prevBtn.setDisabled(true);
} else {
m_prevBtn.setDisabled(false);
}
if(cp + 1 >= np) {
m_nextBtn.setDisabled(true);
} else {
m_nextBtn.setDisabled(false);
}
bd.removeAllChildren();
bd.add(m_prevBtn);
/*
* render page numbers. The basic group is: 3 at the start, 3 at the end, 5 in the middle, unless we have <= 10 pages
* in which case we render all.
*
* 1 2 3 ... n-2 n-1 n n+1 n+2 ... np-2 np-1 np
*/
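        // Illustrative trace (not in the original source): with np = 20 pages and the current
        // page displayed as 10 (cp = 9), the code below renders
        //   [1][2][3] ... [8][9][10][11][12] ... [19][20]
        // where 10 carries the ui-dp2-cp class; note that ms = np - 2 leaves two buttons at the end.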
if(np <= 10) {
renderButtons(0, 0, 10);
} else {
int ci = renderButtons(0, 0, 3); // First 3 buttons
if(ci < np) {
//-- do we have a middle range?
int ms = cp - 2;
if(ms < ci)
ms = ci;
int me = cp + 3; // exclusive bound
if(me > np)
me = np;
if(ms < me) {
if(ci < ms)
bd.add(Icon.faEllipsisH.createNode().css("ui-dp2-ellipsis"));
ci = ms;
//bd.add(" ... ");
ci = renderButtons(ci, ms, me);
}
//-- Now do the end range, if applicable
if(ci < np) {
ms = np - 2;
if(ms < ci)
ms = ci;
if(ci < ms)
bd.add(Icon.faEllipsisH.createNode().css("ui-dp2-ellipsis"));
ci = ms;
ci = renderButtons(ci, ms, np);
}
}
}
bd.add(m_nextBtn);
for(@NonNull SmallImgButton sib : m_extraButtonList) {
bd.add(sib);
sib.addCssClass("ui-dp2-btn");
}
Span reco = new Span();
reco.addCssClass("ui-dp2-nurec");
reco.add(Msgs.uiPagerRecordCount.format(m_table.getResultCount()));
bd.add(reco);
if(m_table.isTruncated()) {
Div node = new Div("ui-dp2-trunc");
bd.add(node);
node.setTitle(Msgs.uiPagerOverflow2.getString());
}
}
private int renderButtons(int ci, int from, int to) throws Exception {
int np = m_table.getPageCount();
for(int i = from; i < to; i++) {
Button b;
if(ci >= np)
break;
if(ci == m_table.getCurrentPage()) {
b = new Button("ui-dp2-btn ui-dp2-pn ui-dp2-cp");
} else {
b = new Button("ui-dp2-btn ui-dp2-pn");
}
b.add(Integer.toString(ci + 1));
final int morons = ci;
b.setClicked(clickednode -> m_table.setCurrentPage(morons));
m_buttonDiv.add(b);
ci++;
}
return ci;
}
// private void redrawSelectionButtons() throws Exception {
// //-- Show/hide the "show selection" button
// final ISelectableTableComponent<Object> dt = (ISelectableTableComponent<Object>) getSelectableTable();
// if(null == dt)
// throw new IllegalStateException("Null selectable table?");
//
// if(isNeedSelectionButton()) {
// if(m_showSelectionBtn == null) {
// m_showSelectionBtn = new SmallImgButton(Icon.of("THEME/dpr-select-on.png"));
// m_buttonDiv.add(4, m_showSelectionBtn); // Always after last navigation button
// m_showSelectionBtn.setClicked(new IClicked<NodeBase>() {
// @Override
// public void clicked(@NonNull NodeBase clickednode) throws Exception {
// dt.setShowSelection(true);
// clickednode.remove();
// m_showSelectionBtn = null;
// }
// });
// m_showSelectionBtn.setTitle(Msgs.BUNDLE.getString("ui.dpr.selections"));
// }
// } else {
// if(m_showSelectionBtn != null) {
// m_showSelectionBtn.remove();
// m_showSelectionBtn = null;
// }
// }
// }
public Div getButtonDiv() {
return m_buttonDiv;
}
public void addButton(IIconRef image, final IClicked<DataPager2> click, final BundleRef bundle, final String ttlkey) {
SmallImgButton i = new SmallImgButton(image, (IClicked<SmallImgButton>) b -> click.clicked(DataPager2.this));
if(bundle != null)
i.setTitle(bundle.getString(ttlkey));
else if(ttlkey != null)
i.setTitle(ttlkey);
getButtonDiv().add(i);
}
/*--------------------------------------------------------------*/
/* CODING: DataTableChangeListener implementation. */
/*--------------------------------------------------------------*/
@Override
public void modelChanged(final @NonNull TableModelTableBase< ? > tbl, final @Nullable ITableModel< ? > old, final @Nullable ITableModel< ? > nw) throws Exception {
forceRebuild(); // jal See bugzilla 7383: table queries done twice
m_buttonDiv = null; // Odd thing indicating that control is unbuilt, apparently
//redraw();
}
@Override
public void pageChanged(final @NonNull TableModelTableBase< ? > tbl) throws Exception {
redraw();
}
@Override
public boolean isShowSelection() {
return m_showSelection;
}
@Override
public void setShowSelection(boolean showSelection) {
if(m_showSelection == showSelection)
return;
m_showSelection = showSelection;
forceRebuild();
}
@Override
public void addButton(@NonNull SmallImgButton sib) {
m_extraButtonList.add(sib);
forceRebuild();
}
public void addButton(@NonNull IIconRef img, @NonNull IClicked<SmallImgButton> clicked) {
addButton(new SmallImgButton(img, clicked));
}
}
|
to.etc.domui/src/main/java/to/etc/domui/component/tbl/DataPager2.java
|
/*
* DomUI Java User Interface library
* Copyright (c) 2010 by Frits Jalvingh, Itris B.V.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* See the "sponsors" file for a list of supporters.
*
* The latest version of DomUI and related code, support and documentation
* can be found at http://www.domui.org/
* The contact for the project is Frits Jalvingh <jal@etc.to>.
*/
package to.etc.domui.component.tbl;
import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import to.etc.domui.component.buttons.SmallImgButton;
import to.etc.domui.component.misc.IIconRef;
import to.etc.domui.component.misc.Icon;
import to.etc.domui.dom.css.DisplayType;
import to.etc.domui.dom.html.Button;
import to.etc.domui.dom.html.Div;
import to.etc.domui.dom.html.IClicked;
import to.etc.domui.dom.html.Span;
import to.etc.function.IExecute;
import to.etc.domui.util.Msgs;
import to.etc.webapp.nls.BundleRef;
import to.etc.webapp.nls.IBundleCode;
import java.util.ArrayList;
import java.util.List;
/**
* Datapager using buttons and a page number list.
*
*
* @author <a href="mailto:jal@etc.to">Frits Jalvingh</a>
* Created on Jan 2, 2019
*/
final public class DataPager2 extends Div implements IDataTablePager {
private Button m_prevBtn;
private Button m_nextBtn;
private PageableTabularComponentBase< ? > m_table;
/** When set (default) this shows selection details when a table has a selectable model. */
private boolean m_showSelection = true;
private Div m_buttonContainer;
private Div m_buttonDiv = new Div();
@NonNull
private List<SmallImgButton> m_extraButtonList = new ArrayList<>();
public DataPager2(final PageableTabularComponentBase< ? > tbl) {
m_table = tbl;
tbl.addChangeListener(this);
}
@Override
public void createContent() throws Exception {
addCssClass("ui-dp2");
m_buttonContainer = new Div("ui-dp2-bc");
add(m_buttonContainer);
Div bd = m_buttonDiv = new Div("ui-dp2-buttons");
m_buttonContainer.add(bd);
m_prevBtn = appendButton(bd, Msgs.uiPagerPrev, () -> {
int cp = m_table.getCurrentPage();
if(cp <= 0)
return;
m_table.setCurrentPage(cp - 1);
});
//-- Last part
m_nextBtn = appendButton(bd, Msgs.uiPagerNext, () -> {
int cp = m_table.getCurrentPage();
int mx = m_table.getPageCount();
cp++;
if(cp >= mx)
return;
m_table.setCurrentPage(cp);
});
redraw();
}
private Button appendButton(Div bd, IBundleCode code, IExecute x) {
Button b = new Button("ui-dp2-btn");
b.setClicked(clickednode -> x.execute());
bd.add(b);
b.add(code.getString());
return b;
}
@Nullable
private ISelectableTableComponent< ? > getSelectableTable() {
if(m_table instanceof ISelectableTableComponent< ? >)
return m_table;
return null;
}
@Nullable
private ISelectionModel< ? > getSelectionModel() {
ISelectableTableComponent< ? > stm = getSelectableTable();
if(null == stm)
return null;
return stm.getSelectionModel();
}
/**
* Return T if the "show selection UI" button should be visible.
*/
private boolean isNeedSelectionButton() throws Exception {
ISelectionModel< ? > sm = getSelectionModel();
if(sm == null || !m_showSelection)
return false;
if(!sm.isMultiSelect())
return false;
ISelectableTableComponent< ? > tc = getSelectableTable();
if(null == tc)
throw new IllegalStateException("Null selectable table?");
if(tc.isMultiSelectionVisible())
return false;
return tc.getModel() != null && tc.getModel().getRows() != 0;
}
@Override
public void selectionUIChanged(@NonNull TableModelTableBase< ? > tbl) throws Exception {
redraw();
}
/*--------------------------------------------------------------*/
/* CODING: Handle changes to the table. */
/*--------------------------------------------------------------*/
private void redraw() throws Exception {
if(! isBuilt())
return;
Div bd = m_buttonDiv;
int np = m_table.getPageCount();
if(np <= 1) {
m_buttonContainer.setDisplay(DisplayType.NONE);
return;
}
m_buttonContainer.setDisplay(DisplayType.BLOCK);
int cp = m_table.getCurrentPage();
if(np == 0) {
setDisplay(DisplayType.NONE);
return;
}
setDisplay(DisplayType.BLOCK);
if(cp <= 0) {
m_prevBtn.setDisabled(true);
} else {
m_prevBtn.setDisabled(false);
}
if(cp + 1 >= np) {
m_nextBtn.setDisabled(true);
} else {
m_nextBtn.setDisabled(false);
}
bd.removeAllChildren();
bd.add(m_prevBtn);
/*
* render page numbers. The basic group is: 3 at the start, 3 at the end, 5 in the middle, unless we have <= 10 pages
* in which case we render all.
*
* 1 2 3 ... n-2 n-1 n n+1 n+2 ... np-2 np-1 np
*/
if(np <= 10) {
renderButtons(0, 0, 10);
} else {
int ci = renderButtons(0, 0, 3); // First 3 buttons
if(ci < np) {
//-- do we have a middle range?
int ms = cp - 2;
if(ms < ci)
ms = ci;
int me = cp + 3; // exclusive bound
if(me > np)
me = np;
if(ms < me) {
if(ci < ms)
bd.add(Icon.faEllipsisH.createNode().css("ui-dp2-ellipsis"));
ci = ms;
//bd.add(" ... ");
ci = renderButtons(ci, ms, me);
}
//-- Now do the end range, if applicable
if(ci < np) {
ms = np - 2;
if(ms < ci)
ms = ci;
if(ci < ms)
bd.add(Icon.faEllipsisH.createNode().css("ui-dp2-ellipsis"));
ci = ms;
ci = renderButtons(ci, ms, np);
}
}
}
bd.add(m_nextBtn);
for(@NonNull SmallImgButton sib : m_extraButtonList) {
bd.add(sib);
}
Span reco = new Span();
reco.addCssClass("ui-dp2-nurec");
reco.add(Msgs.uiPagerRecordCount.format(m_table.getResultCount()));
bd.add(reco);
if(m_table.isTruncated()) {
Div node = new Div("ui-dp2-trunc");
bd.add(node);
node.setTitle(Msgs.uiPagerOverflow2.getString());
}
}
private int renderButtons(int ci, int from, int to) throws Exception {
int np = m_table.getPageCount();
for(int i = from; i < to; i++) {
Button b;
if(ci >= np)
break;
if(ci == m_table.getCurrentPage()) {
b = new Button("ui-dp2-btn ui-dp2-pn ui-dp2-cp");
} else {
b = new Button("ui-dp2-btn ui-dp2-pn");
}
b.add(Integer.toString(ci + 1));
final int morons = ci;
b.setClicked(clickednode -> m_table.setCurrentPage(morons));
m_buttonDiv.add(b);
ci++;
}
return ci;
}
// private void redrawSelectionButtons() throws Exception {
// //-- Show/hide the "show selection" button
// final ISelectableTableComponent<Object> dt = (ISelectableTableComponent<Object>) getSelectableTable();
// if(null == dt)
// throw new IllegalStateException("Null selectable table?");
//
// if(isNeedSelectionButton()) {
// if(m_showSelectionBtn == null) {
// m_showSelectionBtn = new SmallImgButton(Icon.of("THEME/dpr-select-on.png"));
// m_buttonDiv.add(4, m_showSelectionBtn); // Always after last navigation button
// m_showSelectionBtn.setClicked(new IClicked<NodeBase>() {
// @Override
// public void clicked(@NonNull NodeBase clickednode) throws Exception {
// dt.setShowSelection(true);
// clickednode.remove();
// m_showSelectionBtn = null;
// }
// });
// m_showSelectionBtn.setTitle(Msgs.BUNDLE.getString("ui.dpr.selections"));
// }
// } else {
// if(m_showSelectionBtn != null) {
// m_showSelectionBtn.remove();
// m_showSelectionBtn = null;
// }
// }
// }
public Div getButtonDiv() {
return m_buttonDiv;
}
public void addButton(IIconRef image, final IClicked<DataPager2> click, final BundleRef bundle, final String ttlkey) {
SmallImgButton i = new SmallImgButton(image, (IClicked<SmallImgButton>) b -> click.clicked(DataPager2.this));
if(bundle != null)
i.setTitle(bundle.getString(ttlkey));
else if(ttlkey != null)
i.setTitle(ttlkey);
getButtonDiv().add(i);
}
/*--------------------------------------------------------------*/
/* CODING: DataTableChangeListener implementation. */
/*--------------------------------------------------------------*/
@Override
public void modelChanged(final @NonNull TableModelTableBase< ? > tbl, final @Nullable ITableModel< ? > old, final @Nullable ITableModel< ? > nw) throws Exception {
forceRebuild(); // jal See bugzilla 7383: table queries done twice
m_buttonDiv = null; // Odd thing indicating that control is unbuilt, apparently
//redraw();
}
@Override
public void pageChanged(final @NonNull TableModelTableBase< ? > tbl) throws Exception {
redraw();
}
@Override
public boolean isShowSelection() {
return m_showSelection;
}
@Override
public void setShowSelection(boolean showSelection) {
if(m_showSelection == showSelection)
return;
m_showSelection = showSelection;
forceRebuild();
}
@Override
public void addButton(@NonNull SmallImgButton sib) {
m_extraButtonList.add(sib);
forceRebuild();
}
public void addButton(@NonNull IIconRef img, @NonNull IClicked<SmallImgButton> clicked) {
addButton(new SmallImgButton(img, clicked));
}
}
|
Bugfix: DataPager buttons that were added would not have spacing and touched the other buttons
|
to.etc.domui/src/main/java/to/etc/domui/component/tbl/DataPager2.java
|
Bugfix: DataPager buttons that were added would not have spacing and touched the other buttons
|
|
Java
|
lgpl-2.1
|
61a3145c3c907505be425c6338f733d8488137c0
| 0
|
languagetool-org/languagetool,jimregan/languagetool,languagetool-org/languagetool,jimregan/languagetool,jimregan/languagetool,jimregan/languagetool,languagetool-org/languagetool,languagetool-org/languagetool,languagetool-org/languagetool,jimregan/languagetool
|
/* LanguageTool, a natural language style checker
* Copyright (C) 2016 Daniel Naber (http://www.danielnaber.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.languagetool.server;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.sun.net.httpserver.HttpExchange;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.languagetool.*;
import org.languagetool.language.LanguageIdentifier;
import org.languagetool.markup.AnnotatedText;
import org.languagetool.markup.AnnotatedTextBuilder;
import org.languagetool.rules.CategoryId;
import org.languagetool.rules.DictionaryMatchFilter;
import org.languagetool.rules.RemoteRule;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.bitext.BitextRule;
import org.languagetool.rules.spelling.morfologik.suggestions_ordering.SuggestionsOrdererConfig;
import org.languagetool.tools.Tools;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @since 3.4
*/
abstract class TextChecker {
private static final int PINGS_CLEAN_MILLIS = 60 * 1000; // internal pings database will be cleaned this often
private static final int PINGS_MAX_SIZE = 5000;
private static final int NGRAM_THRESHOLD = 50;
protected abstract void setHeaders(HttpExchange httpExchange);
protected abstract String getResponse(AnnotatedText text, Language language, DetectedLanguage lang, Language motherTongue, List<RuleMatch> matches,
List<RuleMatch> hiddenMatches, String incompleteResultReason, int compactMode, boolean showPremiumHint);
@NotNull
protected abstract List<String> getPreferredVariants(Map<String, String> parameters);
protected abstract DetectedLanguage getLanguage(String text, Map<String, String> parameters, List<String> preferredVariants,
List<String> additionalDetectLangs, List<String> preferredLangs, boolean testMode);
protected abstract boolean getLanguageAutoDetect(Map<String, String> parameters);
@NotNull
protected abstract List<String> getEnabledRuleIds(Map<String, String> parameters);
@NotNull
protected abstract List<String> getDisabledRuleIds(Map<String, String> parameters);
protected static final int CONTEXT_SIZE = 40; // characters
protected static final int NUM_PIPELINES_PER_SETTING = 3; // for prewarming
protected final HTTPServerConfig config;
private static final Logger logger = LoggerFactory.getLogger(TextChecker.class);
private static final String ENCODING = "UTF-8";
private static final int CACHE_STATS_PRINT = 500; // print cache stats every n cache requests
private final Map<String,Integer> languageCheckCounts = new HashMap<>();
private final Queue<Runnable> workQueue;
private final RequestCounter reqCounter;
  // keep track of timeouts of the hidden matches server, check health periodically;
  // -1 => healthy, else => check timed out at given date, check back if time difference > config.getHiddenMatchesServerFailTimeout()
  private long lastHiddenMatchesServerTimeout;
  // counter; mark as down if this reaches hiddenMatchesServerFall
private long hiddenMatchesServerFailures = 0;
private final LanguageIdentifier fastTextIdentifier;
private final ExecutorService executorService;
private final ResultCache cache;
private final DatabaseLogger databaseLogger;
private final Long logServerId;
private final Random random = new Random();
private final Set<DatabasePingLogEntry> pings = new HashSet<>();
private long pingsCleanDateMillis = System.currentTimeMillis();
private LanguageIdentifier ngramIdentifier = null;
PipelinePool pipelinePool; // mocked in test -> package-private / not final
TextChecker(HTTPServerConfig config, boolean internalServer, Queue<Runnable> workQueue, RequestCounter reqCounter) {
this.config = config;
this.workQueue = workQueue;
this.reqCounter = reqCounter;
this.fastTextIdentifier = new LanguageIdentifier();
this.fastTextIdentifier.enableFasttext(config.getFasttextBinary(), config.getFasttextModel());
if (config.getNgramLangIdentData() != null) {
this.ngramIdentifier = new LanguageIdentifier();
this.ngramIdentifier.enableNgrams(config.getNgramLangIdentData());
}
this.executorService = Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("lt-textchecker-thread-%d").build());
this.cache = config.getCacheSize() > 0 ? new ResultCache(
config.getCacheSize(), config.getCacheTTLSeconds(), TimeUnit.SECONDS) : null;
this.databaseLogger = DatabaseLogger.getInstance();
if (databaseLogger.isLogging()) {
this.logServerId = DatabaseAccess.getInstance().getOrCreateServerId();
} else {
this.logServerId = null;
}
ServerMetricsCollector.getInstance().logHiddenServerConfiguration(config.getHiddenMatchesServer() != null);
if (cache != null) {
ServerMetricsCollector.getInstance().monitorCache("languagetool_matches_cache", cache.getMatchesCache());
ServerMetricsCollector.getInstance().monitorCache("languagetool_remote_matches_cache", cache.getRemoteMatchesCache());
ServerMetricsCollector.getInstance().monitorCache("languagetool_sentences_cache", cache.getSentenceCache());
ServerMetricsCollector.getInstance().monitorCache("languagetool_remote_matches_cache", cache.getRemoteMatchesCache());
}
pipelinePool = new PipelinePool(config, cache, internalServer);
if (config.isPipelinePrewarmingEnabled()) {
logger.info("Prewarming pipelines...");
prewarmPipelinePool();
logger.info("Prewarming finished.");
}
if (config.getAbTest() != null) {
UserConfig.enableABTests();
logger.info("A/B-Test enabled: " + config.getAbTest());
if (config.getAbTest().equals("SuggestionsOrderer")) {
SuggestionsOrdererConfig.setMLSuggestionsOrderingEnabled(true);
}
}
}
private void prewarmPipelinePool() {
// setting + number of pipelines
// typical addon settings at the moment (2018-11-05)
Map<PipelinePool.PipelineSettings, Integer> prewarmSettings = new HashMap<>();
List<Language> prewarmLanguages = Stream.of(
"de-DE", "en-US", "en-GB", "pt-BR", "ru-RU", "es", "it", "fr", "pl-PL", "uk-UA")
.map(Languages::getLanguageForShortCode)
.collect(Collectors.toList());
List<String> addonDisabledRules = Collections.singletonList("WHITESPACE_RULE");
List<JLanguageTool.Mode> addonModes = Arrays.asList(JLanguageTool.Mode.TEXTLEVEL_ONLY, JLanguageTool.Mode.ALL_BUT_TEXTLEVEL_ONLY);
UserConfig user = new UserConfig();
for (Language language : prewarmLanguages) {
for (JLanguageTool.Mode mode : addonModes) {
QueryParams params = new QueryParams(Collections.emptyList(), Collections.emptyList(), addonDisabledRules,
Collections.emptyList(), Collections.emptyList(), false, true,
false, false, false, mode, JLanguageTool.Level.DEFAULT, null);
PipelinePool.PipelineSettings settings = new PipelinePool.PipelineSettings(language, null, params, config.globalConfig, user);
prewarmSettings.put(settings, NUM_PIPELINES_PER_SETTING);
PipelinePool.PipelineSettings settingsMotherTongueEqual = new PipelinePool.PipelineSettings(language, language, params, config.globalConfig, user);
PipelinePool.PipelineSettings settingsMotherTongueEnglish = new PipelinePool.PipelineSettings(language,
Languages.getLanguageForName("English"), params, config.globalConfig, user);
prewarmSettings.put(settingsMotherTongueEqual, NUM_PIPELINES_PER_SETTING);
prewarmSettings.put(settingsMotherTongueEnglish, NUM_PIPELINES_PER_SETTING);
}
}
try {
for (Map.Entry<PipelinePool.PipelineSettings, Integer> prewarmSetting : prewarmSettings.entrySet()) {
int numPipelines = prewarmSetting.getValue();
PipelinePool.PipelineSettings setting = prewarmSetting.getKey();
// request n pipelines first, return all afterwards -> creates multiple for same setting
List<Pipeline> pipelines = new ArrayList<>();
for (int i = 0; i < numPipelines; i++) {
Pipeline p = pipelinePool.getPipeline(setting);
p.check("LanguageTool");
pipelines.add(p);
}
for (Pipeline p : pipelines) {
pipelinePool.returnPipeline(setting, p);
}
}
} catch (Exception e) {
throw new RuntimeException("Error while prewarming pipelines", e);
}
}
void shutdownNow() {
executorService.shutdownNow();
RemoteRule.shutdown();
}
void checkText(AnnotatedText aText, HttpExchange httpExchange, Map<String, String> parameters, ErrorRequestLimiter errorRequestLimiter,
String remoteAddress) throws Exception {
checkParams(parameters);
long timeStart = System.currentTimeMillis();
UserLimits limits = ServerTools.getUserLimits(parameters, config);
// logging information
String agent = parameters.get("useragent") != null ? parameters.get("useragent") : "-";
Long agentId = null, userId = null;
if (databaseLogger.isLogging()) {
DatabaseAccess db = DatabaseAccess.getInstance();
agentId = db.getOrCreateClientId(parameters.get("useragent"));
userId = limits.getPremiumUid();
}
String referrer = httpExchange.getRequestHeaders().getFirst("Referer");
String userAgent = httpExchange.getRequestHeaders().getFirst("User-Agent");
if (aText.getPlainText().length() > limits.getMaxTextLength()) {
String msg = "limit: " + limits.getMaxTextLength() + ", size: " + aText.getPlainText().length();
databaseLogger.log(new DatabaseAccessLimitLogEntry("MaxCharacterSizeExceeded", logServerId, agentId, userId, msg, referrer, userAgent));
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.MAX_TEXT_SIZE);
throw new TextTooLongException("Your text exceeds the limit of " + limits.getMaxTextLength() +
" characters (it's " + aText.getPlainText().length() + " characters). Please submit a shorter text.");
}
boolean filterDictionaryMatches = "true".equals(parameters.get("filterDictionaryMatches"));
Long textSessionId = null;
try {
if (parameters.containsKey("textSessionId")) {
String textSessionIdStr = parameters.get("textSessionId");
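        // Accepted formats (illustrative examples, not from the original documentation):
        //   "user:12345"      -> session id 12345
        //   "42:1600000000"   -> transitional addon format: "{random in 0..99999}:{unix time}"
        //   "12345"           -> plain numeric session id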
if (textSessionIdStr.startsWith("user:")) {
int sepPos = textSessionIdStr.indexOf(':');
String sessionId = textSessionIdStr.substring(sepPos + 1);
textSessionId = Long.valueOf(sessionId);
} else if (textSessionIdStr.contains(":")) { // transitioning to new format used in chrome addon
// format: "{random number in 0..99999}:{unix time}"
long random, timestamp;
int sepPos = textSessionIdStr.indexOf(':');
random = Long.parseLong(textSessionIdStr.substring(0, sepPos));
timestamp = Long.parseLong(textSessionIdStr.substring(sepPos + 1));
// use random number to choose a slice in possible range of values
// then choose position in slice by timestamp
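          // Illustrative example (hypothetical numbers): with maxRandom = 100000, randomSegmentSize is roughly
          // 9.2e13, so "42:1600000000" maps to textSessionId of about 42 * 9.2e13 + 1.6e9.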
long maxRandom = 100000;
long randomSegmentSize = (Long.MAX_VALUE - maxRandom) / maxRandom;
long segmentOffset = random * randomSegmentSize;
if (timestamp > randomSegmentSize) {
logger.warn(String.format("Could not transform textSessionId '%s'", textSessionIdStr));
}
textSessionId = segmentOffset + timestamp;
} else {
textSessionId = Long.valueOf(textSessionIdStr);
}
}
} catch (NumberFormatException ex) {
logger.warn("Could not parse textSessionId '" + parameters.get("textSessionId") + "' as long: " + ex.getMessage());
}
String abTest = null;
if (agent != null && config.getAbTestClients() != null && config.getAbTestClients().matcher(agent).matches()) {
boolean testRolledOut;
// partial rollout; deterministic if textSessionId given to make testing easier
if (textSessionId != null) {
testRolledOut = textSessionId % 100 < config.getAbTestRollout();
} else {
testRolledOut = random.nextInt(100) < config.getAbTestRollout();
}
if (testRolledOut) {
abTest = config.getAbTest();
}
}
UserConfig userConfig = new UserConfig(
limits.getPremiumUid() != null ? getUserDictWords(limits.getPremiumUid()) : Collections.emptyList(),
getRuleValues(parameters), config.getMaxSpellingSuggestions(), null, null, filterDictionaryMatches,
abTest, textSessionId);
//print("Check start: " + text.length() + " chars, " + langParam);
boolean autoDetectLanguage = getLanguageAutoDetect(parameters);
List<String> preferredVariants = getPreferredVariants(parameters);
if (parameters.get("noopLanguages") != null && !autoDetectLanguage) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You can specify 'noopLanguages' only when also using 'language=auto'");
}
List<String> noopLangs = parameters.get("noopLanguages") != null ?
Arrays.asList(parameters.get("noopLanguages").split(",")) : Collections.emptyList();
List<String> preferredLangs = parameters.get("preferredLanguages") != null ?
Arrays.asList(parameters.get("preferredLanguages").split(",")) : Collections.emptyList();
DetectedLanguage detLang = getLanguage(aText.getPlainText(), parameters, preferredVariants, noopLangs, preferredLangs,
parameters.getOrDefault("ld", "control").equalsIgnoreCase("test"));
Language lang = detLang.getGivenLanguage();
// == temporary counting code ======================================
/*
if (httpExchange.getRequestHeaders() != null && httpExchange.getRequestHeaders().get("Accept-Language") != null) {
List<String> langs = httpExchange.getRequestHeaders().get("Accept-Language");
if (langs.size() > 0) {
String[] split = langs.get(0).split(",");
if (split.length > 0 && detLang.getDetectedLanguage() != null && detLang.getDetectedLanguage().getShortCode().equals("en")) {
int theCount1 = StringUtils.countMatches(aText.toString(), " the ");
int theCount2 = StringUtils.countMatches(aText.toString(), "The ");
String browserLang = split[0];
System.out.println("STAT\t" + detLang.getDetectedLanguage().getShortCode() + "\t" + detLang.getDetectionConfidence() + "\t" + aText.toString().length() + "\t" + browserLang + "\t" + theCount1 + "\t" + theCount2);
}
}
}
*/
// ========================================
Integer count = languageCheckCounts.get(lang.getShortCodeWithCountryAndVariant());
if (count == null) {
count = 1;
} else {
count++;
}
//print("Starting check: " + aText.getPlainText().length() + " chars, #" + count);
String motherTongueParam = parameters.get("motherTongue");
Language motherTongue = motherTongueParam != null ? Languages.getLanguageForShortCode(motherTongueParam) : null;
boolean useEnabledOnly = "yes".equals(parameters.get("enabledOnly")) || "true".equals(parameters.get("enabledOnly"));
List<Language> altLanguages = new ArrayList<>();
if (parameters.get("altLanguages") != null) {
String[] altLangParams = parameters.get("altLanguages").split(",\\s*");
for (String langCode : altLangParams) {
Language altLang = Languages.getLanguageForShortCode(langCode);
altLanguages.add(altLang);
if (altLang.hasVariant() && !altLang.isVariant()) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You specified altLanguage '" + langCode + "', but for this language you need to specify a variant, e.g. 'en-GB' instead of just 'en'");
}
}
}
List<String> enabledRules = getEnabledRuleIds(parameters);
List<String> disabledRules = getDisabledRuleIds(parameters);
List<CategoryId> enabledCategories = getCategoryIds("enabledCategories", parameters);
List<CategoryId> disabledCategories = getCategoryIds("disabledCategories", parameters);
if ((disabledRules.size() > 0 || disabledCategories.size() > 0) && useEnabledOnly) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You cannot specify disabled rules or categories using enabledOnly=true");
}
if (enabledRules.isEmpty() && enabledCategories.isEmpty() && useEnabledOnly) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You must specify enabled rules or categories when using enabledOnly=true");
}
boolean enableTempOffRules = "true".equals(parameters.get("enableTempOffRules"));
boolean useQuerySettings = enabledRules.size() > 0 || disabledRules.size() > 0 ||
enabledCategories.size() > 0 || disabledCategories.size() > 0 || enableTempOffRules;
boolean allowIncompleteResults = "true".equals(parameters.get("allowIncompleteResults"));
boolean enableHiddenRules = "true".equals(parameters.get("enableHiddenRules"));
JLanguageTool.Mode mode = ServerTools.getMode(parameters);
JLanguageTool.Level level = ServerTools.getLevel(parameters);
String callback = parameters.get("callback");
// allowed to log input on errors?
boolean inputLogging = !parameters.getOrDefault("inputLogging", "").equals("no");
QueryParams params = new QueryParams(altLanguages, enabledRules, disabledRules,
enabledCategories, disabledCategories, useEnabledOnly,
useQuerySettings, allowIncompleteResults, enableHiddenRules, enableTempOffRules, mode, level, callback, inputLogging);
int textSize = aText.getPlainText().length();
List<RuleMatch> ruleMatchesSoFar = Collections.synchronizedList(new ArrayList<>());
Future<List<RuleMatch>> future = executorService.submit(new Callable<List<RuleMatch>>() {
@Override
public List<RuleMatch> call() throws Exception {
// use to fake OOM in thread for testing:
/*if (Math.random() < 0.1) {
throw new OutOfMemoryError();
}*/
return getRuleMatches(aText, lang, motherTongue, parameters, params, userConfig, detLang, preferredLangs, preferredVariants, f -> ruleMatchesSoFar.add(f));
}
});
String incompleteResultReason = null;
List<RuleMatch> matches;
try {
if (limits.getMaxCheckTimeMillis() < 0) {
matches = future.get();
} else {
matches = future.get(limits.getMaxCheckTimeMillis(), TimeUnit.MILLISECONDS);
}
} catch (ExecutionException e) {
future.cancel(true);
if (ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.TOO_MANY_ERRORS);
databaseLogger.log(new DatabaseCheckErrorLogEntry("ErrorRateTooHigh", logServerId, agentId, userId, lang, detLang.getDetectedLanguage(), textSize, "matches: " + ruleMatchesSoFar.size()));
}
if (params.allowIncompleteResults && ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
logger.warn(e.getMessage() + " - returning " + ruleMatchesSoFar.size() + " matches found so far. " +
"Detected language: " + detLang + ", " + ServerTools.getLoggingInfo(remoteAddress, null, -1, httpExchange,
parameters, System.currentTimeMillis()-timeStart, reqCounter));
matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
incompleteResultReason = "Results are incomplete: " + ExceptionUtils.getRootCause(e).getMessage();
} else if (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) {
throw (OutOfMemoryError)e.getCause();
} else {
throw new RuntimeException(ServerTools.cleanUserTextFromMessage(e.getMessage(), parameters) + ", detected: " + detLang, e);
}
} catch (TimeoutException e) {
boolean cancelled = future.cancel(true);
Path loadFile = Paths.get("/proc/loadavg"); // works in Linux only(?)
String loadInfo = loadFile.toFile().exists() ? Files.readAllLines(loadFile).toString() : "(unknown)";
if (errorRequestLimiter != null) {
errorRequestLimiter.logAccess(remoteAddress, httpExchange.getRequestHeaders(), parameters);
}
String message = "Text checking took longer than allowed maximum of " + limits.getMaxCheckTimeMillis() +
" milliseconds (cancelled: " + cancelled +
", lang: " + lang.getShortCodeWithCountryAndVariant() +
", detected: " + detLang +
", #" + count +
", " + aText.getPlainText().length() + " characters of text" +
", mode: " + mode.toString().toLowerCase() +
", h: " + reqCounter.getHandleCount() + ", r: " + reqCounter.getRequestCount() + ", system load: " + loadInfo + ")";
if (params.allowIncompleteResults) {
logger.info(message + " - returning " + ruleMatchesSoFar.size() + " matches found so far");
matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
incompleteResultReason = "Results are incomplete: text checking took longer than allowed maximum of " +
String.format(Locale.ENGLISH, "%.2f", limits.getMaxCheckTimeMillis()/1000.0) + " seconds";
} else {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.MAX_CHECK_TIME);
databaseLogger.log(new DatabaseCheckErrorLogEntry("MaxCheckTimeExceeded",
logServerId, agentId, limits.getPremiumUid(), lang, detLang.getDetectedLanguage(), textSize, "load: "+ loadInfo));
throw new RuntimeException(message, e);
}
}
setHeaders(httpExchange);
List<RuleMatch> hiddenMatches = new ArrayList<>();
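    // The hidden matches server is only queried when enabled for this language; after
    // getHiddenMatchesServerFall consecutive failures it is treated as down and skipped for
    // getHiddenMatchesServerFailTimeout ms (see the handling below).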
if (config.getHiddenMatchesServer() != null && params.enableHiddenRules &&
config.getHiddenMatchesLanguages().contains(lang)) {
if (config.getHiddenMatchesServerFailTimeout() > 0 && lastHiddenMatchesServerTimeout != -1 &&
System.currentTimeMillis() - lastHiddenMatchesServerTimeout < config.getHiddenMatchesServerFailTimeout()) {
ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
ServerMetricsCollector.getInstance().logHiddenServerRequest(false);
logger.warn("Warn: Skipped querying hidden matches server at " +
config.getHiddenMatchesServer() + " because of recent error/timeout (timeout=" + config.getHiddenMatchesServerFailTimeout() + "ms).");
} else {
ResultExtender resultExtender = new ResultExtender(config.getHiddenMatchesServer(), config.getHiddenMatchesServerTimeout());
try {
long start = System.currentTimeMillis();
List<RemoteRuleMatch> extensionMatches = resultExtender.getExtensionMatches(aText.getPlainText(), parameters);
hiddenMatches = resultExtender.getFilteredExtensionMatches(matches, extensionMatches);
long end = System.currentTimeMillis();
logger.info("Hidden matches: " + extensionMatches.size() + " -> " + hiddenMatches.size() + " in " + (end - start) + "ms for " + lang.getShortCodeWithCountryAndVariant());
ServerMetricsCollector.getInstance().logHiddenServerStatus(true);
lastHiddenMatchesServerTimeout = -1;
hiddenMatchesServerFailures = 0;
ServerMetricsCollector.getInstance().logHiddenServerRequest(true);
} catch (Exception e) {
ServerMetricsCollector.getInstance().logHiddenServerRequest(false);
hiddenMatchesServerFailures++;
if (hiddenMatchesServerFailures >= config.getHiddenMatchesServerFall()) {
ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
logger.warn("Failed to query hidden matches server at " + config.getHiddenMatchesServer() + ": " + e.getClass() + ": " + e.getMessage() + ", input was " + aText.getPlainText().length() + " characters - marked as down now");
lastHiddenMatchesServerTimeout = System.currentTimeMillis();
} else {
logger.warn("Failed to query hidden matches server at " + config.getHiddenMatchesServer() + ": " + e.getClass() + ": " + e.getMessage() + ", input was " + aText.getPlainText().length() + " characters - " + (config.getHiddenMatchesServerFall() - hiddenMatchesServerFailures) + " errors until marked as down");
}
}
}
}
int compactMode = Integer.parseInt(parameters.getOrDefault("c", "0"));
String response = getResponse(aText, lang, detLang, motherTongue, matches, hiddenMatches, incompleteResultReason, compactMode, limits.getPremiumUid() == null);
if (params.callback != null) {
// JSONP - still needed today for the special case of hosting your own on-premise LT without SSL
// and using it from a local MS Word (not Online Word) - issue #89 in the add-in repo:
response = params.callback + "(" + response + ");";
}
String messageSent = "sent";
String languageMessage = lang.getShortCodeWithCountryAndVariant();
try {
httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.getBytes(ENCODING).length);
httpExchange.getResponseBody().write(response.getBytes(ENCODING));
ServerMetricsCollector.getInstance().logResponse(HttpURLConnection.HTTP_OK);
} catch (IOException exception) {
// the client is disconnected
messageSent = "notSent: " + exception.getMessage();
}
if (motherTongue != null) {
languageMessage += " (mother tongue: " + motherTongue.getShortCodeWithCountryAndVariant() + ")";
}
if (autoDetectLanguage) {
languageMessage += "[auto]";
}
languageCheckCounts.put(lang.getShortCodeWithCountryAndVariant(), count);
int computationTime = (int) (System.currentTimeMillis() - timeStart);
String version = parameters.get("v") != null ? ", v:" + parameters.get("v") : "";
String skipLimits = limits.getSkipLimits() ? ", skipLimits" : "";
logger.info("Check done: " + aText.getPlainText().length() + " chars, " + languageMessage + ", #" + count + ", " + referrer + ", "
+ matches.size() + " matches, "
+ computationTime + "ms, agent:" + agent + version
+ ", " + messageSent + ", q:" + (workQueue != null ? workQueue.size() : "?")
+ ", h:" + reqCounter.getHandleCount() + ", dH:" + reqCounter.getDistinctIps()
+ ", m:" + mode.toString().toLowerCase() + skipLimits);
int matchCount = matches.size();
Map<String, Integer> ruleMatchCount = new HashMap<>();
for (RuleMatch match : matches) {
String ruleId = match.getRule().getId();
ruleMatchCount.put(ruleId, ruleMatchCount.getOrDefault(ruleId, 0) + 1);
}
ServerMetricsCollector.getInstance().logCheck(
lang, computationTime, textSize, matchCount, mode);
if (!config.isSkipLoggingChecks()) {
DatabaseCheckLogEntry logEntry = new DatabaseCheckLogEntry(userId, agentId, logServerId, textSize, matchCount,
lang, detLang.getDetectedLanguage(), computationTime, textSessionId, mode.toString());
logEntry.setRuleMatches(new DatabaseRuleMatchLogEntry(
config.isSkipLoggingRuleMatches() ? Collections.emptyMap() : ruleMatchCount));
databaseLogger.log(logEntry);
}
if (databaseLogger.isLogging()) {
if (System.currentTimeMillis() - pingsCleanDateMillis > PINGS_CLEAN_MILLIS && pings.size() < PINGS_MAX_SIZE) {
logger.info("Cleaning pings DB (" + pings.size() + " items)");
pings.clear();
pingsCleanDateMillis = System.currentTimeMillis();
}
if (agentId != null && userId != null) {
DatabasePingLogEntry ping = new DatabasePingLogEntry(agentId, userId);
if (!pings.contains(ping)) {
databaseLogger.log(ping);
if (pings.size() >= PINGS_MAX_SIZE) {
// prevent pings taking up unlimited amounts of memory
logger.warn("Pings DB has reached max size: " + pings.size());
} else {
pings.add(ping);
}
}
}
}
}
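  /**
   * Parses the "ruleValues" request parameter into a map of rule id to integer configuration value.
   * Illustrative input (hypothetical rule ids): "CONFUSION_RULE:3,TOO_LONG_SENTENCE:60".
   */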
private Map<String, Integer> getRuleValues(Map<String, String> parameters) {
Map<String, Integer> ruleValues = new HashMap<>();
String parameterString = parameters.get("ruleValues");
if (parameterString == null) {
return ruleValues;
}
String[] pairs = parameterString.split("[,]");
for (String pair : pairs) {
String[] ruleAndValue = pair.split("[:]");
ruleValues.put(ruleAndValue[0], Integer.parseInt(ruleAndValue[1]));
}
return ruleValues;
}
private List<String> getUserDictWords(Long userId) {
DatabaseAccess db = DatabaseAccess.getInstance();
return db.getUserDictWords(userId);
}
protected void checkParams(Map<String, String> parameters) {
if (parameters.get("text") == null && parameters.get("data") == null) {
throw new IllegalArgumentException("Missing 'text' or 'data' parameter");
}
}
private List<RuleMatch> getRuleMatches(AnnotatedText aText, Language lang,
Language motherTongue, Map<String, String> parameters,
QueryParams params, UserConfig userConfig,
DetectedLanguage detLang,
List<String> preferredLangs, List<String> preferredVariants,
RuleMatchListener listener) throws Exception {
if (cache != null && cache.requestCount() > 0 && cache.requestCount() % CACHE_STATS_PRINT == 0) {
double hitRate = cache.hitRate();
String hitPercentage = String.format(Locale.ENGLISH, "%.2f", hitRate * 100.0f);
logger.info("Cache stats: " + hitPercentage + "% hit rate");
//print("Matches : " + cache.getMatchesCache().stats().hitRate() + " hit rate");
//print("Sentences : " + cache.getSentenceCache().stats().hitRate() + " hit rate");
//print("Size : " + cache.getMatchesCache().size() + " (matches cache), " + cache.getSentenceCache().size() + " (sentence cache)");
//logger.log(new DatabaseCacheStatsLogEntry(logServerId, (float) hitRate));
}
if (parameters.get("sourceText") != null) {
if (parameters.get("sourceLanguage") == null) {
throw new IllegalArgumentException("'sourceLanguage' parameter missing - must be set when 'sourceText' is set");
}
Language sourceLanguage = Languages.getLanguageForShortCode(parameters.get("sourceLanguage"));
JLanguageTool sourceLt = new JLanguageTool(sourceLanguage);
JLanguageTool targetLt = new JLanguageTool(lang);
if (userConfig.filterDictionaryMatches()) {
targetLt.addMatchFilter(new DictionaryMatchFilter(userConfig));
}
List<BitextRule> bitextRules = Tools.getBitextRules(sourceLanguage, lang);
return Tools.checkBitext(parameters.get("sourceText"), aText.getPlainText(), sourceLt, targetLt, bitextRules);
} else {
List<RuleMatch> matches = new ArrayList<>();
if (preferredLangs.size() < 2 || parameters.get("multilingual") == null || parameters.get("multilingual").equals("false")) {
matches.addAll(getPipelineResults(aText, lang, motherTongue, params, userConfig, listener));
} else {
// support for multilingual texts:
try {
Language mainLang = getLanguageVariantForCode(detLang.getDetectedLanguage().getShortCode(), preferredVariants);
List<Language> secondLangs = new ArrayList<>();
for (String preferredLangCode : preferredLangs) {
if (!preferredLangCode.equals(mainLang.getShortCode())) {
secondLangs.add(getLanguageVariantForCode(preferredLangCode, preferredVariants));
break;
}
}
LanguageAnnotator annotator = new LanguageAnnotator();
List<FragmentWithLanguage> fragments = annotator.detectLanguages(aText.getPlainText(), mainLang, secondLangs);
List<Language> langs = new ArrayList<>();
langs.add(mainLang);
langs.addAll(secondLangs);
Map<Language, AnnotatedTextBuilder> lang2builder = getBuilderMap(fragments, new HashSet<>(langs));
for (Map.Entry<Language, AnnotatedTextBuilder> entry : lang2builder.entrySet()) {
matches.addAll(getPipelineResults(entry.getValue().build(), entry.getKey(), motherTongue, params, userConfig, listener));
}
} catch (Exception e) {
logger.error("Problem with multilingual mode (preferredLangs=" + preferredLangs+ ", preferredVariants=" + preferredVariants + "), " +
"falling back to single language.", e);
matches.addAll(getPipelineResults(aText, lang, motherTongue, params, userConfig, listener));
}
}
return matches;
}
}
private Language getLanguageVariantForCode(String langCode, List<String> preferredVariants) {
for (String preferredVariant : preferredVariants) {
if (preferredVariant.startsWith(langCode + "-")) {
return Languages.getLanguageForShortCode(preferredVariant);
}
}
return Languages.getLanguageForShortCode(langCode);
}
private List<RuleMatch> getPipelineResults(AnnotatedText aText, Language lang, Language motherTongue, QueryParams params, UserConfig userConfig, RuleMatchListener listener) throws Exception {
PipelinePool.PipelineSettings settings = null;
Pipeline lt = null;
List<RuleMatch> matches = new ArrayList<>();
try {
settings = new PipelinePool.PipelineSettings(lang, motherTongue, params, config.globalConfig, userConfig);
lt = pipelinePool.getPipeline(settings);
Long textSessionId = userConfig.getTextSessionId();
if (params.regressionTestMode) {
textSessionId = -2L; // magic value for remote rule roll-out - includes all results, even from disabled models
}
matches.addAll(lt.check(aText, true, JLanguageTool.ParagraphHandling.NORMAL, listener,
params.mode, params.level, executorService, textSessionId));
} finally {
if (lt != null) {
pipelinePool.returnPipeline(settings, lt);
}
}
return matches;
}
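  /**
   * Distributes detected fragments over one AnnotatedTextBuilder per language: each builder receives its own
   * language's fragments as text and all other fragments as markup, i.e. ignored during checking.
   * Illustrative example (hypothetical fragments): ("en-US", "Hello.") and ("de-DE", "Hallo.") yield two
   * builders; the en-US builder checks only "Hello." and treats "Hallo." as markup.
   */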
@NotNull
private Map<Language, AnnotatedTextBuilder> getBuilderMap(List<FragmentWithLanguage> fragments, Set<Language> maybeUsedLangs) {
Map<Language, AnnotatedTextBuilder> lang2builder = new HashMap<>();
for (Language usedLang : maybeUsedLangs) {
if (!lang2builder.containsKey(usedLang)) {
lang2builder.put(usedLang, new AnnotatedTextBuilder());
}
AnnotatedTextBuilder atb = lang2builder.get(usedLang);
for (FragmentWithLanguage fragment : fragments) {
if (usedLang.getShortCodeWithCountryAndVariant().equals(fragment.getLangCode())) {
atb.addText(fragment.getFragment());
} else {
atb.addMarkup(fragment.getFragment()); // markup = ignore this text
}
}
}
return lang2builder;
}
@NotNull
private List<CategoryId> getCategoryIds(String paramName, Map<String, String> parameters) {
List<String> stringIds = getCommaSeparatedStrings(paramName, parameters);
List<CategoryId> ids = new ArrayList<>();
for (String stringId : stringIds) {
ids.add(new CategoryId(stringId));
}
return ids;
}
@NotNull
protected List<String> getCommaSeparatedStrings(String paramName, Map<String, String> parameters) {
String disabledParam = parameters.get(paramName);
List<String> result = new ArrayList<>();
if (disabledParam != null) {
result.addAll(Arrays.asList(disabledParam.split(",")));
}
return result;
}
DetectedLanguage detectLanguageOfString(String text, String fallbackLanguage, List<String> preferredVariants,
List<String> noopLangs, List<String> preferredLangs, boolean testMode) {
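    // Texts shorter than NGRAM_THRESHOLD characters use the ngram identifier when it is configured;
    // longer texts use fastText if enabled, otherwise the built-in detector.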
DetectedLanguage detected;
String mode;
long t1 = System.nanoTime();
if (ngramIdentifier != null && text.length() < NGRAM_THRESHOLD) {
detected = ngramIdentifier.detectLanguage(text, noopLangs, preferredLangs);
mode = "ngram";
} else {
detected = fastTextIdentifier.detectLanguage(text, noopLangs, preferredLangs);
mode = fastTextIdentifier.isFastTextEnabled() ? "fasttext" : "built-in";
}
long t2 = System.nanoTime();
float runTime = (t2-t1)/1000.0f/1000.0f;
//System.out.printf(Locale.ENGLISH, "detected " + detected + " using " + mode + " in %.2fms for %d chars\n", runTime, text.length());
Language lang;
if (detected == null) {
lang = Languages.getLanguageForShortCode(fallbackLanguage != null ? fallbackLanguage : "en");
} else {
lang = detected.getDetectedLanguage();
}
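    // Illustrative example (hypothetical): a detected "en" with preferredVariants containing "en-GB" is
    // upgraded to "en-GB"; when no preferred variants are given at all, the default variant (if any) is used.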
if (preferredVariants.size() > 0) {
for (String preferredVariant : preferredVariants) {
if (!preferredVariant.contains("-")) {
throw new IllegalArgumentException("Invalid format for 'preferredVariants', expected a dash as in 'en-GB': '" + preferredVariant + "'");
}
String preferredVariantLang = preferredVariant.split("-")[0];
if (preferredVariantLang.equals(lang.getShortCode())) {
lang = Languages.getLanguageForShortCode(preferredVariant);
if (lang == null) {
throw new IllegalArgumentException("Invalid 'preferredVariants', no such language/variant found: '" + preferredVariant + "'");
}
}
}
} else {
if (lang.getDefaultLanguageVariant() != null) {
lang = lang.getDefaultLanguageVariant();
}
}
return new DetectedLanguage(null, lang, detected != null ? detected.getDetectionConfidence() : 0f);
}
static class QueryParams {
final List<Language> altLanguages;
final List<String> enabledRules;
final List<String> disabledRules;
final List<CategoryId> enabledCategories;
final List<CategoryId> disabledCategories;
final boolean useEnabledOnly;
final boolean useQuerySettings;
final boolean allowIncompleteResults;
final boolean enableHiddenRules;
final boolean enableTempOffRules;
final JLanguageTool.Mode mode;
final JLanguageTool.Level level;
final String callback;
/** allowed to log input with stack traces to reproduce errors? */
final boolean inputLogging;
final boolean regressionTestMode; // no fallbacks for remote rules, retries, enable all rules
QueryParams(List<Language> altLanguages, List<String> enabledRules, List<String> disabledRules, List<CategoryId> enabledCategories, List<CategoryId> disabledCategories,
boolean useEnabledOnly, boolean useQuerySettings, boolean allowIncompleteResults, boolean enableHiddenRules, boolean enableTempOffRules, JLanguageTool.Mode mode, JLanguageTool.Level level, @Nullable String callback) {
this(altLanguages, enabledRules, disabledRules, enabledCategories, disabledCategories, useEnabledOnly, useQuerySettings, allowIncompleteResults, enableHiddenRules, enableTempOffRules, mode, level, callback, true);
}
QueryParams(List<Language> altLanguages, List<String> enabledRules, List<String> disabledRules, List<CategoryId> enabledCategories, List<CategoryId> disabledCategories,
boolean useEnabledOnly, boolean useQuerySettings, boolean allowIncompleteResults, boolean enableHiddenRules, boolean enableTempOffRules, JLanguageTool.Mode mode, JLanguageTool.Level level, @Nullable String callback, boolean inputLogging) {
this.altLanguages = Objects.requireNonNull(altLanguages);
this.enabledRules = enabledRules;
this.disabledRules = disabledRules;
this.enabledCategories = enabledCategories;
this.disabledCategories = disabledCategories;
this.useEnabledOnly = useEnabledOnly;
this.useQuerySettings = useQuerySettings;
this.allowIncompleteResults = allowIncompleteResults;
this.enableHiddenRules = enableHiddenRules;
this.enableTempOffRules = enableTempOffRules;
this.regressionTestMode = enableTempOffRules;
this.mode = Objects.requireNonNull(mode);
this.level = Objects.requireNonNull(level);
if (callback != null && !callback.matches("[a-zA-Z]+")) {
throw new IllegalArgumentException("'callback' value must match [a-zA-Z]+: '" + callback + "'");
}
this.callback = callback;
this.inputLogging = inputLogging;
}
@Override
public int hashCode() {
return new HashCodeBuilder()
.append(altLanguages)
.append(enabledRules)
.append(disabledRules)
.append(enabledCategories)
.append(disabledCategories)
.append(useEnabledOnly)
.append(useQuerySettings)
.append(allowIncompleteResults)
.append(enableHiddenRules)
.append(enableTempOffRules)
.append(regressionTestMode)
.append(mode)
.append(level)
.append(callback)
.append(inputLogging)
.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj == null || getClass() != obj.getClass()) {
return false;
}
QueryParams other = (QueryParams) obj;
return new EqualsBuilder()
.append(altLanguages, other.altLanguages)
.append(enabledRules, other.enabledRules)
.append(disabledRules, other.disabledRules)
.append(enabledCategories, other.enabledCategories)
.append(disabledCategories, other.disabledCategories)
.append(useEnabledOnly, other.useEnabledOnly)
.append(useQuerySettings, other.useQuerySettings)
.append(allowIncompleteResults, other.allowIncompleteResults)
.append(enableHiddenRules, other.enableHiddenRules)
.append(enableTempOffRules, other.enableTempOffRules)
.append(regressionTestMode, other.regressionTestMode)
.append(mode, other.mode)
.append(level, other.level)
.append(callback, other.callback)
.append(inputLogging, other.inputLogging)
.isEquals();
}
@Override
public String toString() {
return new ToStringBuilder(this)
.append("altLanguages", altLanguages)
.append("enabledRules", enabledRules)
.append("disabledRules", disabledRules)
.append("enabledCategories", enabledCategories)
.append("disabledCategories", disabledCategories)
.append("useEnabledOnly", useEnabledOnly)
.append("useQuerySettings", useQuerySettings)
.append("allowIncompleteResults", allowIncompleteResults)
.append("enableHiddenRules", enableHiddenRules)
.append("enableTempOffRules", enableTempOffRules)
.append("regressionTestMode", regressionTestMode)
.append("mode", mode)
.append("level", level)
.append("callback", callback)
.append("inputLogging", inputLogging)
.build();
}
}
}
|
languagetool-server/src/main/java/org/languagetool/server/TextChecker.java
|
/* LanguageTool, a natural language style checker
* Copyright (C) 2016 Daniel Naber (http://www.danielnaber.de)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package org.languagetool.server;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.sun.net.httpserver.HttpExchange;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.languagetool.*;
import org.languagetool.language.LanguageIdentifier;
import org.languagetool.markup.AnnotatedText;
import org.languagetool.markup.AnnotatedTextBuilder;
import org.languagetool.rules.CategoryId;
import org.languagetool.rules.DictionaryMatchFilter;
import org.languagetool.rules.RemoteRule;
import org.languagetool.rules.RuleMatch;
import org.languagetool.rules.bitext.BitextRule;
import org.languagetool.rules.spelling.morfologik.suggestions_ordering.SuggestionsOrdererConfig;
import org.languagetool.tools.Tools;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @since 3.4
*/
abstract class TextChecker {
private static final int PINGS_CLEAN_MILLIS = 60 * 1000; // internal pings database will be cleaned this often
private static final int PINGS_MAX_SIZE = 5000;
private static final int NGRAM_THRESHOLD = 50;
protected abstract void setHeaders(HttpExchange httpExchange);
protected abstract String getResponse(AnnotatedText text, Language language, DetectedLanguage lang, Language motherTongue, List<RuleMatch> matches,
List<RuleMatch> hiddenMatches, String incompleteResultReason, int compactMode, boolean showPremiumHint);
@NotNull
protected abstract List<String> getPreferredVariants(Map<String, String> parameters);
protected abstract DetectedLanguage getLanguage(String text, Map<String, String> parameters, List<String> preferredVariants,
List<String> additionalDetectLangs, List<String> preferredLangs, boolean testMode);
protected abstract boolean getLanguageAutoDetect(Map<String, String> parameters);
@NotNull
protected abstract List<String> getEnabledRuleIds(Map<String, String> parameters);
@NotNull
protected abstract List<String> getDisabledRuleIds(Map<String, String> parameters);
protected static final int CONTEXT_SIZE = 40; // characters
protected static final int NUM_PIPELINES_PER_SETTING = 3; // for prewarming
protected final HTTPServerConfig config;
private static final Logger logger = LoggerFactory.getLogger(TextChecker.class);
private static final String ENCODING = "UTF-8";
private static final int CACHE_STATS_PRINT = 500; // print cache stats every n cache requests
private final Map<String,Integer> languageCheckCounts = new HashMap<>();
private final Queue<Runnable> workQueue;
private final RequestCounter reqCounter;
  // keep track of timeouts of the hidden matches server, check health periodically;
  // -1 => healthy, else => check timed out at given date, check back if time difference > config.getHiddenMatchesServerFailTimeout()
  private long lastHiddenMatchesServerTimeout;
  // counter; mark as down if this reaches hiddenMatchesServerFall
private long hiddenMatchesServerFailures = 0;
private final LanguageIdentifier fastTextIdentifier;
private final ExecutorService executorService;
private final ResultCache cache;
private final DatabaseLogger databaseLogger;
private final Long logServerId;
private final Random random = new Random();
private final Set<DatabasePingLogEntry> pings = new HashSet<>();
private long pingsCleanDateMillis = System.currentTimeMillis();
private LanguageIdentifier ngramIdentifier = null;
PipelinePool pipelinePool; // mocked in test -> package-private / not final
TextChecker(HTTPServerConfig config, boolean internalServer, Queue<Runnable> workQueue, RequestCounter reqCounter) {
this.config = config;
this.workQueue = workQueue;
this.reqCounter = reqCounter;
this.fastTextIdentifier = new LanguageIdentifier();
this.fastTextIdentifier.enableFasttext(config.getFasttextBinary(), config.getFasttextModel());
if (config.getNgramLangIdentData() != null) {
this.ngramIdentifier = new LanguageIdentifier();
this.ngramIdentifier.enableNgrams(config.getNgramLangIdentData());
}
this.executorService = Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("lt-textchecker-thread-%d").build());
this.cache = config.getCacheSize() > 0 ? new ResultCache(
config.getCacheSize(), config.getCacheTTLSeconds(), TimeUnit.SECONDS) : null;
this.databaseLogger = DatabaseLogger.getInstance();
if (databaseLogger.isLogging()) {
this.logServerId = DatabaseAccess.getInstance().getOrCreateServerId();
} else {
this.logServerId = null;
}
ServerMetricsCollector.getInstance().logHiddenServerConfiguration(config.getHiddenMatchesServer() != null);
if (cache != null) {
ServerMetricsCollector.getInstance().monitorCache("languagetool_matches_cache", cache.getMatchesCache());
ServerMetricsCollector.getInstance().monitorCache("languagetool_remote_matches_cache", cache.getRemoteMatchesCache());
ServerMetricsCollector.getInstance().monitorCache("languagetool_sentences_cache", cache.getSentenceCache());
ServerMetricsCollector.getInstance().monitorCache("languagetool_remote_matches_cache", cache.getRemoteMatchesCache());
}
pipelinePool = new PipelinePool(config, cache, internalServer);
if (config.isPipelinePrewarmingEnabled()) {
logger.info("Prewarming pipelines...");
prewarmPipelinePool();
logger.info("Prewarming finished.");
}
if (config.getAbTest() != null) {
UserConfig.enableABTests();
logger.info("A/B-Test enabled: " + config.getAbTest());
if (config.getAbTest().equals("SuggestionsOrderer")) {
SuggestionsOrdererConfig.setMLSuggestionsOrderingEnabled(true);
}
}
}
private void prewarmPipelinePool() {
// setting + number of pipelines
// typical addon settings at the moment (2018-11-05)
Map<PipelinePool.PipelineSettings, Integer> prewarmSettings = new HashMap<>();
List<Language> prewarmLanguages = Stream.of(
"de-DE", "en-US", "en-GB", "pt-BR", "ru-RU", "es", "it", "fr", "pl-PL", "uk-UA")
.map(Languages::getLanguageForShortCode)
.collect(Collectors.toList());
List<String> addonDisabledRules = Collections.singletonList("WHITESPACE_RULE");
List<JLanguageTool.Mode> addonModes = Arrays.asList(JLanguageTool.Mode.TEXTLEVEL_ONLY, JLanguageTool.Mode.ALL_BUT_TEXTLEVEL_ONLY);
UserConfig user = new UserConfig();
for (Language language : prewarmLanguages) {
for (JLanguageTool.Mode mode : addonModes) {
QueryParams params = new QueryParams(Collections.emptyList(), Collections.emptyList(), addonDisabledRules,
Collections.emptyList(), Collections.emptyList(), false, true,
false, false, false, mode, JLanguageTool.Level.DEFAULT, null);
PipelinePool.PipelineSettings settings = new PipelinePool.PipelineSettings(language, null, params, config.globalConfig, user);
prewarmSettings.put(settings, NUM_PIPELINES_PER_SETTING);
PipelinePool.PipelineSettings settingsMotherTongueEqual = new PipelinePool.PipelineSettings(language, language, params, config.globalConfig, user);
PipelinePool.PipelineSettings settingsMotherTongueEnglish = new PipelinePool.PipelineSettings(language,
Languages.getLanguageForName("English"), params, config.globalConfig, user);
prewarmSettings.put(settingsMotherTongueEqual, NUM_PIPELINES_PER_SETTING);
prewarmSettings.put(settingsMotherTongueEnglish, NUM_PIPELINES_PER_SETTING);
}
}
try {
for (Map.Entry<PipelinePool.PipelineSettings, Integer> prewarmSetting : prewarmSettings.entrySet()) {
int numPipelines = prewarmSetting.getValue();
PipelinePool.PipelineSettings setting = prewarmSetting.getKey();
// request n pipelines first, return all afterwards -> creates multiple for same setting
List<Pipeline> pipelines = new ArrayList<>();
for (int i = 0; i < numPipelines; i++) {
Pipeline p = pipelinePool.getPipeline(setting);
p.check("LanguageTool");
pipelines.add(p);
}
for (Pipeline p : pipelines) {
pipelinePool.returnPipeline(setting, p);
}
}
} catch (Exception e) {
throw new RuntimeException("Error while prewarming pipelines", e);
}
}
void shutdownNow() {
executorService.shutdownNow();
RemoteRule.shutdown();
}
void checkText(AnnotatedText aText, HttpExchange httpExchange, Map<String, String> parameters, ErrorRequestLimiter errorRequestLimiter,
String remoteAddress) throws Exception {
checkParams(parameters);
long timeStart = System.currentTimeMillis();
UserLimits limits = ServerTools.getUserLimits(parameters, config);
// logging information
String agent = parameters.get("useragent") != null ? parameters.get("useragent") : "-";
Long agentId = null, userId = null;
if (databaseLogger.isLogging()) {
DatabaseAccess db = DatabaseAccess.getInstance();
agentId = db.getOrCreateClientId(parameters.get("useragent"));
userId = limits.getPremiumUid();
}
String referrer = httpExchange.getRequestHeaders().getFirst("Referer");
String userAgent = httpExchange.getRequestHeaders().getFirst("User-Agent");
if (aText.getPlainText().length() > limits.getMaxTextLength()) {
String msg = "limit: " + limits.getMaxTextLength() + ", size: " + aText.getPlainText().length();
databaseLogger.log(new DatabaseAccessLimitLogEntry("MaxCharacterSizeExceeded", logServerId, agentId, userId, msg, referrer, userAgent));
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.MAX_TEXT_SIZE);
throw new TextTooLongException("Your text exceeds the limit of " + limits.getMaxTextLength() +
" characters (it's " + aText.getPlainText().length() + " characters). Please submit a shorter text.");
}
boolean filterDictionaryMatches = "true".equals(parameters.get("filterDictionaryMatches"));
Long textSessionId = null;
try {
if (parameters.containsKey("textSessionId")) {
String textSessionIdStr = parameters.get("textSessionId");
if (textSessionIdStr.startsWith("user:")) {
int sepPos = textSessionIdStr.indexOf(':');
String sessionId = textSessionIdStr.substring(sepPos + 1);
textSessionId = Long.valueOf(sessionId);
} else if (textSessionIdStr.contains(":")) { // transitioning to new format used in chrome addon
// format: "{random number in 0..99999}:{unix time}"
long random, timestamp;
int sepPos = textSessionIdStr.indexOf(':');
random = Long.parseLong(textSessionIdStr.substring(0, sepPos));
timestamp = Long.parseLong(textSessionIdStr.substring(sepPos + 1));
// use random number to choose a slice in possible range of values
// then choose position in slice by timestamp
long maxRandom = 100000;
long randomSegmentSize = (Long.MAX_VALUE - maxRandom) / maxRandom;
long segmentOffset = random * randomSegmentSize;
if (timestamp > randomSegmentSize) {
logger.warn(String.format("Could not transform textSessionId '%s'", textSessionIdStr));
}
textSessionId = segmentOffset + timestamp;
} else {
textSessionId = Long.valueOf(textSessionIdStr);
}
}
} catch (NumberFormatException ex) {
logger.warn("Could not parse textSessionId '" + parameters.get("textSessionId") + "' as long: " + ex.getMessage());
}
String abTest = null;
if (agent != null && config.getAbTestClients() != null && config.getAbTestClients().matcher(agent).matches()) {
boolean testRolledOut;
// partial rollout; deterministic if textSessionId given to make testing easier
if (textSessionId != null) {
testRolledOut = textSessionId % 100 < config.getAbTestRollout();
} else {
testRolledOut = random.nextInt(100) < config.getAbTestRollout();
}
if (testRolledOut) {
abTest = config.getAbTest();
}
}
UserConfig userConfig = new UserConfig(
limits.getPremiumUid() != null ? getUserDictWords(limits.getPremiumUid()) : Collections.emptyList(),
getRuleValues(parameters), config.getMaxSpellingSuggestions(), null, null, filterDictionaryMatches,
abTest, textSessionId);
//print("Check start: " + text.length() + " chars, " + langParam);
boolean autoDetectLanguage = getLanguageAutoDetect(parameters);
List<String> preferredVariants = getPreferredVariants(parameters);
if (parameters.get("noopLanguages") != null && !autoDetectLanguage) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You can specify 'noopLanguages' only when also using 'language=auto'");
}
List<String> noopLangs = parameters.get("noopLanguages") != null ?
Arrays.asList(parameters.get("noopLanguages").split(",")) : Collections.emptyList();
List<String> preferredLangs = parameters.get("preferredLanguages") != null ?
Arrays.asList(parameters.get("preferredLanguages").split(",")) : Collections.emptyList();
DetectedLanguage detLang = getLanguage(aText.getPlainText(), parameters, preferredVariants, noopLangs, preferredLangs,
parameters.getOrDefault("ld", "control").equalsIgnoreCase("test"));
Language lang = detLang.getGivenLanguage();
// == temporary counting code ======================================
/*
if (httpExchange.getRequestHeaders() != null && httpExchange.getRequestHeaders().get("Accept-Language") != null) {
List<String> langs = httpExchange.getRequestHeaders().get("Accept-Language");
if (langs.size() > 0) {
String[] split = langs.get(0).split(",");
if (split.length > 0 && detLang.getDetectedLanguage() != null && detLang.getDetectedLanguage().getShortCode().equals("en")) {
int theCount1 = StringUtils.countMatches(aText.toString(), " the ");
int theCount2 = StringUtils.countMatches(aText.toString(), "The ");
String browserLang = split[0];
System.out.println("STAT\t" + detLang.getDetectedLanguage().getShortCode() + "\t" + detLang.getDetectionConfidence() + "\t" + aText.toString().length() + "\t" + browserLang + "\t" + theCount1 + "\t" + theCount2);
}
}
}
*/
// ========================================
Integer count = languageCheckCounts.get(lang.getShortCodeWithCountryAndVariant());
if (count == null) {
count = 1;
} else {
count++;
}
//print("Starting check: " + aText.getPlainText().length() + " chars, #" + count);
String motherTongueParam = parameters.get("motherTongue");
Language motherTongue = motherTongueParam != null ? Languages.getLanguageForShortCode(motherTongueParam) : null;
boolean useEnabledOnly = "yes".equals(parameters.get("enabledOnly")) || "true".equals(parameters.get("enabledOnly"));
List<Language> altLanguages = new ArrayList<>();
if (parameters.get("altLanguages") != null) {
String[] altLangParams = parameters.get("altLanguages").split(",\\s*");
for (String langCode : altLangParams) {
Language altLang = Languages.getLanguageForShortCode(langCode);
altLanguages.add(altLang);
if (altLang.hasVariant() && !altLang.isVariant()) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You specified altLanguage '" + langCode + "', but for this language you need to specify a variant, e.g. 'en-GB' instead of just 'en'");
}
}
}
List<String> enabledRules = getEnabledRuleIds(parameters);
List<String> disabledRules = getDisabledRuleIds(parameters);
List<CategoryId> enabledCategories = getCategoryIds("enabledCategories", parameters);
List<CategoryId> disabledCategories = getCategoryIds("disabledCategories", parameters);
if ((disabledRules.size() > 0 || disabledCategories.size() > 0) && useEnabledOnly) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You cannot specify disabled rules or categories using enabledOnly=true");
}
if (enabledRules.isEmpty() && enabledCategories.isEmpty() && useEnabledOnly) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.INVALID_REQUEST);
throw new IllegalArgumentException("You must specify enabled rules or categories when using enabledOnly=true");
}
boolean enableTempOffRules = "true".equals(parameters.get("enableTempOffRules"));
boolean useQuerySettings = enabledRules.size() > 0 || disabledRules.size() > 0 ||
enabledCategories.size() > 0 || disabledCategories.size() > 0 || enableTempOffRules;
boolean allowIncompleteResults = "true".equals(parameters.get("allowIncompleteResults"));
boolean enableHiddenRules = "true".equals(parameters.get("enableHiddenRules"));
JLanguageTool.Mode mode = ServerTools.getMode(parameters);
JLanguageTool.Level level = ServerTools.getLevel(parameters);
String callback = parameters.get("callback");
// allowed to log input on errors?
boolean inputLogging = !parameters.getOrDefault("inputLogging", "").equals("no");
QueryParams params = new QueryParams(altLanguages, enabledRules, disabledRules,
enabledCategories, disabledCategories, useEnabledOnly,
useQuerySettings, allowIncompleteResults, enableHiddenRules, enableTempOffRules, mode, level, callback, inputLogging);
int textSize = aText.getPlainText().length();
List<RuleMatch> ruleMatchesSoFar = Collections.synchronizedList(new ArrayList<>());
Future<List<RuleMatch>> future = executorService.submit(new Callable<List<RuleMatch>>() {
@Override
public List<RuleMatch> call() throws Exception {
// use to fake OOM in thread for testing:
/*if (Math.random() < 0.1) {
throw new OutOfMemoryError();
}*/
return getRuleMatches(aText, lang, motherTongue, parameters, params, userConfig, detLang, preferredLangs, preferredVariants, f -> ruleMatchesSoFar.add(f));
}
});
String incompleteResultReason = null;
List<RuleMatch> matches;
try {
if (limits.getMaxCheckTimeMillis() < 0) {
matches = future.get();
} else {
matches = future.get(limits.getMaxCheckTimeMillis(), TimeUnit.MILLISECONDS);
}
} catch (ExecutionException e) {
future.cancel(true);
if (ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.TOO_MANY_ERRORS);
databaseLogger.log(new DatabaseCheckErrorLogEntry("ErrorRateTooHigh", logServerId, agentId, userId, lang, detLang.getDetectedLanguage(), textSize, "matches: " + ruleMatchesSoFar.size()));
}
if (params.allowIncompleteResults && ExceptionUtils.getRootCause(e) instanceof ErrorRateTooHighException) {
logger.warn(e.getMessage() + " - returning " + ruleMatchesSoFar.size() + " matches found so far. " +
"Detected language: " + detLang + ", " + ServerTools.getLoggingInfo(remoteAddress, null, -1, httpExchange,
parameters, System.currentTimeMillis()-timeStart, reqCounter));
matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
incompleteResultReason = "Results are incomplete: " + ExceptionUtils.getRootCause(e).getMessage();
} else if (e.getCause() != null && e.getCause() instanceof OutOfMemoryError) {
throw (OutOfMemoryError)e.getCause();
} else {
throw new RuntimeException(ServerTools.cleanUserTextFromMessage(e.getMessage(), parameters) + ", detected: " + detLang, e);
}
} catch (TimeoutException e) {
boolean cancelled = future.cancel(true);
Path loadFile = Paths.get("/proc/loadavg"); // works in Linux only(?)
String loadInfo = loadFile.toFile().exists() ? Files.readAllLines(loadFile).toString() : "(unknown)";
if (errorRequestLimiter != null) {
errorRequestLimiter.logAccess(remoteAddress, httpExchange.getRequestHeaders(), parameters);
}
String message = "Text checking took longer than allowed maximum of " + limits.getMaxCheckTimeMillis() +
" milliseconds (cancelled: " + cancelled +
", lang: " + lang.getShortCodeWithCountryAndVariant() +
", detected: " + detLang +
", #" + count +
", " + aText.getPlainText().length() + " characters of text" +
", mode: " + mode.toString().toLowerCase() +
", h: " + reqCounter.getHandleCount() + ", r: " + reqCounter.getRequestCount() + ", system load: " + loadInfo + ")";
if (params.allowIncompleteResults) {
logger.info(message + " - returning " + ruleMatchesSoFar.size() + " matches found so far");
matches = new ArrayList<>(ruleMatchesSoFar); // threads might still be running, so make a copy
incompleteResultReason = "Results are incomplete: text checking took longer than allowed maximum of " +
String.format(Locale.ENGLISH, "%.2f", limits.getMaxCheckTimeMillis()/1000.0) + " seconds";
} else {
ServerMetricsCollector.getInstance().logRequestError(ServerMetricsCollector.RequestErrorType.MAX_CHECK_TIME);
databaseLogger.log(new DatabaseCheckErrorLogEntry("MaxCheckTimeExceeded",
logServerId, agentId, limits.getPremiumUid(), lang, detLang.getDetectedLanguage(), textSize, "load: "+ loadInfo));
throw new RuntimeException(message, e);
}
}
setHeaders(httpExchange);
List<RuleMatch> hiddenMatches = new ArrayList<>();
if (config.getHiddenMatchesServer() != null && params.enableHiddenRules &&
config.getHiddenMatchesLanguages().contains(lang)) {
if (config.getHiddenMatchesServerFailTimeout() > 0 && lastHiddenMatchesServerTimeout != -1 &&
System.currentTimeMillis() - lastHiddenMatchesServerTimeout < config.getHiddenMatchesServerFailTimeout()) {
ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
ServerMetricsCollector.getInstance().logHiddenServerRequest(false);
logger.warn("Warn: Skipped querying hidden matches server at " +
config.getHiddenMatchesServer() + " because of recent error/timeout (timeout=" + config.getHiddenMatchesServerFailTimeout() + "ms).");
} else {
ResultExtender resultExtender = new ResultExtender(config.getHiddenMatchesServer(), config.getHiddenMatchesServerTimeout());
try {
long start = System.currentTimeMillis();
List<RemoteRuleMatch> extensionMatches = resultExtender.getExtensionMatches(aText.getPlainText(), parameters);
hiddenMatches = resultExtender.getFilteredExtensionMatches(matches, extensionMatches);
long end = System.currentTimeMillis();
logger.info("Hidden matches: " + extensionMatches.size() + " -> " + hiddenMatches.size() + " in " + (end - start) + "ms for " + lang.getShortCodeWithCountryAndVariant());
ServerMetricsCollector.getInstance().logHiddenServerStatus(true);
lastHiddenMatchesServerTimeout = -1;
hiddenMatchesServerFailures = 0;
ServerMetricsCollector.getInstance().logHiddenServerRequest(true);
} catch (Exception e) {
ServerMetricsCollector.getInstance().logHiddenServerRequest(false);
hiddenMatchesServerFailures++;
if (hiddenMatchesServerFailures >= config.getHiddenMatchesServerFall()) {
ServerMetricsCollector.getInstance().logHiddenServerStatus(false);
logger.warn("Failed to query hidden matches server at " + config.getHiddenMatchesServer() + ": " + e.getClass() + ": " + e.getMessage() + ", input was " + aText.getPlainText().length() + " characters - marked as down now");
lastHiddenMatchesServerTimeout = System.currentTimeMillis();
} else {
logger.warn("Failed to query hidden matches server at " + config.getHiddenMatchesServer() + ": " + e.getClass() + ": " + e.getMessage() + ", input was " + aText.getPlainText().length() + " characters - " + (config.getHiddenMatchesServerFall() - hiddenMatchesServerFailures) + " errors until marked as down");
}
}
}
}
int compactMode = Integer.parseInt(parameters.getOrDefault("c", "0"));
String response = getResponse(aText, lang, detLang, motherTongue, matches, hiddenMatches, incompleteResultReason, compactMode, limits.getPremiumUid() == null);
if (params.callback != null) {
// JSONP - still needed today for the special case of hosting your own on-premise LT without SSL
// and using it from a local MS Word (not Online Word) - issue #89 in the add-in repo:
response = params.callback + "(" + response + ");";
}
String messageSent = "sent";
String languageMessage = lang.getShortCodeWithCountryAndVariant();
try {
httpExchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.getBytes(ENCODING).length);
httpExchange.getResponseBody().write(response.getBytes(ENCODING));
ServerMetricsCollector.getInstance().logResponse(HttpURLConnection.HTTP_OK);
} catch (IOException exception) {
// the client is disconnected
messageSent = "notSent: " + exception.getMessage();
}
if (motherTongue != null) {
languageMessage += " (mother tongue: " + motherTongue.getShortCodeWithCountryAndVariant() + ")";
}
if (autoDetectLanguage) {
languageMessage += "[auto]";
}
languageCheckCounts.put(lang.getShortCodeWithCountryAndVariant(), count);
int computationTime = (int) (System.currentTimeMillis() - timeStart);
String version = parameters.get("v") != null ? ", v:" + parameters.get("v") : "";
String skipLimits = limits.getSkipLimits() ? ", skipLimits" : "";
logger.info("Check done: " + aText.getPlainText().length() + " chars, " + languageMessage + ", #" + count + ", " + referrer + ", "
+ matches.size() + " matches, "
+ computationTime + "ms, agent:" + agent + version
+ ", " + messageSent + ", q:" + (workQueue != null ? workQueue.size() : "?")
+ ", h:" + reqCounter.getHandleCount() + ", dH:" + reqCounter.getDistinctIps()
+ ", m:" + mode.toString().toLowerCase() + skipLimits);
int matchCount = matches.size();
Map<String, Integer> ruleMatchCount = new HashMap<>();
for (RuleMatch match : matches) {
String ruleId = match.getRule().getId();
ruleMatchCount.put(ruleId, ruleMatchCount.getOrDefault(ruleId, 0) + 1);
}
ServerMetricsCollector.getInstance().logCheck(
lang, computationTime, textSize, matchCount, mode);
if (!config.isSkipLoggingChecks()) {
DatabaseCheckLogEntry logEntry = new DatabaseCheckLogEntry(userId, agentId, logServerId, textSize, matchCount,
lang, detLang.getDetectedLanguage(), computationTime, textSessionId, mode.toString());
logEntry.setRuleMatches(new DatabaseRuleMatchLogEntry(
config.isSkipLoggingRuleMatches() ? Collections.emptyMap() : ruleMatchCount));
databaseLogger.log(logEntry);
}
if (databaseLogger.isLogging()) {
if (System.currentTimeMillis() - pingsCleanDateMillis > PINGS_CLEAN_MILLIS && pings.size() < PINGS_MAX_SIZE) {
logger.info("Cleaning pings DB (" + pings.size() + " items)");
pings.clear();
pingsCleanDateMillis = System.currentTimeMillis();
}
if (agentId != null && userId != null) {
DatabasePingLogEntry ping = new DatabasePingLogEntry(agentId, userId);
if (!pings.contains(ping)) {
databaseLogger.log(ping);
if (pings.size() >= PINGS_MAX_SIZE) {
// prevent pings taking up unlimited amounts of memory
logger.warn("Pings DB has reached max size: " + pings.size());
} else {
pings.add(ping);
}
}
}
}
}
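// Parses the 'ruleValues' request parameter, a comma-separated list of "RULE_ID:intValue"
// pairs, into a map from rule id to its configured value.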
private Map<String, Integer> getRuleValues(Map<String, String> parameters) {
Map<String, Integer> ruleValues = new HashMap<>();
String parameterString = parameters.get("ruleValues");
if (parameterString == null) {
return ruleValues;
}
String[] pairs = parameterString.split("[,]");
for (String pair : pairs) {
String[] ruleAndValue = pair.split("[:]");
ruleValues.put(ruleAndValue[0], Integer.parseInt(ruleAndValue[1]));
}
return ruleValues;
}
private List<String> getUserDictWords(Long userId) {
DatabaseAccess db = DatabaseAccess.getInstance();
return db.getUserDictWords(userId);
}
protected void checkParams(Map<String, String> parameters) {
if (parameters.get("text") == null && parameters.get("data") == null) {
throw new IllegalArgumentException("Missing 'text' or 'data' parameter");
}
}
private List<RuleMatch> getRuleMatches(AnnotatedText aText, Language lang,
Language motherTongue, Map<String, String> parameters,
QueryParams params, UserConfig userConfig,
DetectedLanguage detLang,
List<String> preferredLangs, List<String> preferredVariants,
RuleMatchListener listener) throws Exception {
if (cache != null && cache.requestCount() > 0 && cache.requestCount() % CACHE_STATS_PRINT == 0) {
double hitRate = cache.hitRate();
String hitPercentage = String.format(Locale.ENGLISH, "%.2f", hitRate * 100.0f);
logger.info("Cache stats: " + hitPercentage + "% hit rate");
//print("Matches : " + cache.getMatchesCache().stats().hitRate() + " hit rate");
//print("Sentences : " + cache.getSentenceCache().stats().hitRate() + " hit rate");
//print("Size : " + cache.getMatchesCache().size() + " (matches cache), " + cache.getSentenceCache().size() + " (sentence cache)");
//logger.log(new DatabaseCacheStatsLogEntry(logServerId, (float) hitRate));
}
if (parameters.get("sourceText") != null) {
if (parameters.get("sourceLanguage") == null) {
throw new IllegalArgumentException("'sourceLanguage' parameter missing - must be set when 'sourceText' is set");
}
Language sourceLanguage = Languages.getLanguageForShortCode(parameters.get("sourceLanguage"));
JLanguageTool sourceLt = new JLanguageTool(sourceLanguage);
JLanguageTool targetLt = new JLanguageTool(lang);
if (userConfig.filterDictionaryMatches()) {
targetLt.addMatchFilter(new DictionaryMatchFilter(userConfig));
}
List<BitextRule> bitextRules = Tools.getBitextRules(sourceLanguage, lang);
return Tools.checkBitext(parameters.get("sourceText"), aText.getPlainText(), sourceLt, targetLt, bitextRules);
} else {
List<RuleMatch> matches = new ArrayList<>();
if (preferredLangs.size() < 2 || parameters.get("multilingual") == null || parameters.get("multilingual").equals("false")) {
matches.addAll(getPipelineResults(aText, lang, motherTongue, params, userConfig, listener));
} else {
// support for multilingual texts:
try {
Language mainLang = getLanguageVariantForCode(detLang.getDetectedLanguage().getShortCode(), preferredVariants);
List<Language> secondLangs = new ArrayList<>();
for (String preferredLangCode : preferredLangs) {
if (!preferredLangCode.equals(mainLang.getShortCode())) {
secondLangs.add(getLanguageVariantForCode(preferredLangCode, preferredVariants));
break;
}
}
LanguageAnnotator annotator = new LanguageAnnotator();
List<FragmentWithLanguage> fragments = annotator.detectLanguages(aText.getPlainText(), mainLang, secondLangs);
List<Language> langs = new ArrayList<>();
langs.add(mainLang);
langs.addAll(secondLangs);
Map<Language, AnnotatedTextBuilder> lang2builder = getBuilderMap(fragments, new HashSet<>(langs));
for (Map.Entry<Language, AnnotatedTextBuilder> entry : lang2builder.entrySet()) {
matches.addAll(getPipelineResults(entry.getValue().build(), entry.getKey(), motherTongue, params, userConfig, listener));
}
} catch (Exception e) {
logger.error("Problem with multilingual mode (preferredLangs=" + preferredLangs+ ", preferredVariants=" + preferredVariants + "), " +
"falling back to single language.", e);
matches.addAll(getPipelineResults(aText, lang, motherTongue, params, userConfig, listener));
}
}
return matches;
}
}
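// Returns the preferred variant (e.g. "en-GB") for the given base language code if one is
// listed in preferredVariants, otherwise the language for the plain code.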
private Language getLanguageVariantForCode(String langCode, List<String> preferredVariants) {
for (String preferredVariant : preferredVariants) {
if (preferredVariant.startsWith(langCode + "-")) {
return Languages.getLanguageForShortCode(preferredVariant);
}
}
return Languages.getLanguageForShortCode(langCode);
}
private List<RuleMatch> getPipelineResults(AnnotatedText aText, Language lang, Language motherTongue, QueryParams params, UserConfig userConfig, RuleMatchListener listener) throws Exception {
PipelinePool.PipelineSettings settings = null;
Pipeline lt = null;
List<RuleMatch> matches = new ArrayList<>();
try {
settings = new PipelinePool.PipelineSettings(lang, motherTongue, params, config.globalConfig, userConfig);
lt = pipelinePool.getPipeline(settings);
Long textSessionId = userConfig.getTextSessionId();
if (params.regressionTestMode) {
textSessionId = -2L; // magic value for remote rule roll-out - includes all results, even from disabled models
}
matches.addAll(lt.check(aText, true, JLanguageTool.ParagraphHandling.NORMAL, listener,
params.mode, params.level, executorService, textSessionId));
} finally {
if (lt != null) {
pipelinePool.returnPipeline(settings, lt);
}
}
return matches;
}
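// Builds one AnnotatedTextBuilder per language: fragments detected as that language are added
// as checkable text, fragments in any other language are added as markup and thus ignored.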
@NotNull
private Map<Language, AnnotatedTextBuilder> getBuilderMap(List<FragmentWithLanguage> fragments, Set<Language> maybeUsedLangs) {
Map<Language, AnnotatedTextBuilder> lang2builder = new HashMap<>();
for (Language usedLang : maybeUsedLangs) {
if (!lang2builder.containsKey(usedLang)) {
lang2builder.put(usedLang, new AnnotatedTextBuilder());
}
AnnotatedTextBuilder atb = lang2builder.get(usedLang);
for (FragmentWithLanguage fragment : fragments) {
if (usedLang.getShortCodeWithCountryAndVariant().equals(fragment.getLangCode())) {
atb.addText(fragment.getFragment());
} else {
atb.addMarkup(fragment.getFragment()); // markup = ignore this text
}
}
}
return lang2builder;
}
@NotNull
private List<CategoryId> getCategoryIds(String paramName, Map<String, String> parameters) {
List<String> stringIds = getCommaSeparatedStrings(paramName, parameters);
List<CategoryId> ids = new ArrayList<>();
for (String stringId : stringIds) {
ids.add(new CategoryId(stringId));
}
return ids;
}
@NotNull
protected List<String> getCommaSeparatedStrings(String paramName, Map<String, String> parameters) {
String disabledParam = parameters.get(paramName);
List<String> result = new ArrayList<>();
if (disabledParam != null) {
result.addAll(Arrays.asList(disabledParam.split(",")));
}
return result;
}
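// Detects the language of the text, using the n-gram identifier (when configured) for texts
// shorter than NGRAM_THRESHOLD and fastText or the built-in detector otherwise, then applies a
// matching preferred variant or the language's default variant.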
DetectedLanguage detectLanguageOfString(String text, String fallbackLanguage, List<String> preferredVariants,
List<String> noopLangs, List<String> preferredLangs, boolean testMode) {
DetectedLanguage detected;
String mode;
long t1 = System.nanoTime();
if (ngramIdentifier != null && text.length() < NGRAM_THRESHOLD) {
detected = ngramIdentifier.detectLanguage(text, noopLangs, preferredLangs);
mode = "ngram";
} else {
detected = fastTextIdentifier.detectLanguage(text, noopLangs, preferredLangs);
mode = fastTextIdentifier.isFastTextEnabled() ? "fasttext" : "built-in";
}
long t2 = System.nanoTime();
float runTime = (t2-t1)/1000.0f/1000.0f;
System.out.printf(Locale.ENGLISH, "detected " + detected + " using " + mode + " in %.2fms for %d chars\n", runTime, text.length());
Language lang;
if (detected == null) {
lang = Languages.getLanguageForShortCode(fallbackLanguage != null ? fallbackLanguage : "en");
} else {
lang = detected.getDetectedLanguage();
}
if (preferredVariants.size() > 0) {
for (String preferredVariant : preferredVariants) {
if (!preferredVariant.contains("-")) {
throw new IllegalArgumentException("Invalid format for 'preferredVariants', expected a dash as in 'en-GB': '" + preferredVariant + "'");
}
String preferredVariantLang = preferredVariant.split("-")[0];
if (preferredVariantLang.equals(lang.getShortCode())) {
lang = Languages.getLanguageForShortCode(preferredVariant);
if (lang == null) {
throw new IllegalArgumentException("Invalid 'preferredVariants', no such language/variant found: '" + preferredVariant + "'");
}
}
}
} else {
if (lang.getDefaultLanguageVariant() != null) {
lang = lang.getDefaultLanguageVariant();
}
}
return new DetectedLanguage(null, lang, detected != null ? detected.getDetectionConfidence() : 0f);
}
static class QueryParams {
final List<Language> altLanguages;
final List<String> enabledRules;
final List<String> disabledRules;
final List<CategoryId> enabledCategories;
final List<CategoryId> disabledCategories;
final boolean useEnabledOnly;
final boolean useQuerySettings;
final boolean allowIncompleteResults;
final boolean enableHiddenRules;
final boolean enableTempOffRules;
final JLanguageTool.Mode mode;
final JLanguageTool.Level level;
final String callback;
/** allowed to log input with stack traces to reproduce errors? */
final boolean inputLogging;
final boolean regressionTestMode; // no fallbacks or retries for remote rules; enable all rules
QueryParams(List<Language> altLanguages, List<String> enabledRules, List<String> disabledRules, List<CategoryId> enabledCategories, List<CategoryId> disabledCategories,
boolean useEnabledOnly, boolean useQuerySettings, boolean allowIncompleteResults, boolean enableHiddenRules, boolean enableTempOffRules, JLanguageTool.Mode mode, JLanguageTool.Level level, @Nullable String callback) {
this(altLanguages, enabledRules, disabledRules, enabledCategories, disabledCategories, useEnabledOnly, useQuerySettings, allowIncompleteResults, enableHiddenRules, enableTempOffRules, mode, level, callback, true);
}
QueryParams(List<Language> altLanguages, List<String> enabledRules, List<String> disabledRules, List<CategoryId> enabledCategories, List<CategoryId> disabledCategories,
boolean useEnabledOnly, boolean useQuerySettings, boolean allowIncompleteResults, boolean enableHiddenRules, boolean enableTempOffRules, JLanguageTool.Mode mode, JLanguageTool.Level level, @Nullable String callback, boolean inputLogging) {
this.altLanguages = Objects.requireNonNull(altLanguages);
this.enabledRules = enabledRules;
this.disabledRules = disabledRules;
this.enabledCategories = enabledCategories;
this.disabledCategories = disabledCategories;
this.useEnabledOnly = useEnabledOnly;
this.useQuerySettings = useQuerySettings;
this.allowIncompleteResults = allowIncompleteResults;
this.enableHiddenRules = enableHiddenRules;
this.enableTempOffRules = enableTempOffRules;
this.regressionTestMode = enableTempOffRules;
this.mode = Objects.requireNonNull(mode);
this.level = Objects.requireNonNull(level);
if (callback != null && !callback.matches("[a-zA-Z]+")) {
throw new IllegalArgumentException("'callback' value must match [a-zA-Z]+: '" + callback + "'");
}
this.callback = callback;
this.inputLogging = inputLogging;
}
@Override
public int hashCode() {
return new HashCodeBuilder()
.append(altLanguages)
.append(enabledRules)
.append(disabledRules)
.append(enabledCategories)
.append(disabledCategories)
.append(useEnabledOnly)
.append(useQuerySettings)
.append(allowIncompleteResults)
.append(enableHiddenRules)
.append(enableTempOffRules)
.append(regressionTestMode)
.append(mode)
.append(level)
.append(callback)
.append(inputLogging)
.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (obj == this) return true;
if (obj == null || getClass() != obj.getClass()) {
return false;
}
QueryParams other = (QueryParams) obj;
return new EqualsBuilder()
.append(altLanguages, other.altLanguages)
.append(enabledRules, other.enabledRules)
.append(disabledRules, other.disabledRules)
.append(enabledCategories, other.enabledCategories)
.append(disabledCategories, other.disabledCategories)
.append(useEnabledOnly, other.useEnabledOnly)
.append(useQuerySettings, other.useQuerySettings)
.append(allowIncompleteResults, other.allowIncompleteResults)
.append(enableHiddenRules, other.enableHiddenRules)
.append(enableTempOffRules, other.enableTempOffRules)
.append(regressionTestMode, other.regressionTestMode)
.append(mode, other.mode)
.append(level, other.level)
.append(callback, other.callback)
.append(inputLogging, other.inputLogging)
.isEquals();
}
@Override
public String toString() {
return new ToStringBuilder(this)
.append("altLanguages", altLanguages)
.append("enabledRules", enabledRules)
.append("disabledRules", disabledRules)
.append("enabledCategories", enabledCategories)
.append("disabledCategories", disabledCategories)
.append("useEnabledOnly", useEnabledOnly)
.append("useQuerySettings", useQuerySettings)
.append("allowIncompleteResults", allowIncompleteResults)
.append("enableHiddenRules", enableHiddenRules)
.append("enableTempOffRules", enableTempOffRules)
.append("regressionTestMode", regressionTestMode)
.append("mode", mode)
.append("level", level)
.append("callback", callback)
.append("inputLogging", inputLogging)
.build();
}
}
}
|
remove language detection logging, currently not used
|
languagetool-server/src/main/java/org/languagetool/server/TextChecker.java
|
remove language detection logging, currently not used
|
|
Java
|
apache-2.0
|
34d713229832d6472533925e8eabfd2fc0eb4d28
| 0
|
galderz/Aeron,mikeb01/Aeron,galderz/Aeron,EvilMcJerkface/Aeron,galderz/Aeron,real-logic/Aeron,galderz/Aeron,EvilMcJerkface/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron,mikeb01/Aeron,mikeb01/Aeron,real-logic/Aeron,mikeb01/Aeron,real-logic/Aeron,EvilMcJerkface/Aeron
|
/*
* Copyright 2017 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.codecs.SourceLocation;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.codecs.*;
import io.aeron.cluster.service.*;
import io.aeron.logbuffer.ControlledFragmentHandler;
import io.aeron.logbuffer.Header;
import io.aeron.status.ReadableCounter;
import org.agrona.*;
import org.agrona.collections.ArrayListUtil;
import org.agrona.collections.Long2ObjectHashMap;
import org.agrona.collections.LongArrayList;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.CountersReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static io.aeron.ChannelUri.SPY_QUALIFIER;
import static io.aeron.CommonContext.ENDPOINT_PARAM_NAME;
import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static io.aeron.cluster.ClusterSession.State.*;
import static io.aeron.cluster.ConsensusModule.Configuration.SESSION_TIMEOUT_MSG;
import static io.aeron.cluster.ConsensusModule.SNAPSHOT_TYPE_ID;
import static org.agrona.concurrent.status.CountersReader.METADATA_LENGTH;
class SequencerAgent implements Agent, ServiceControlListener
{
private boolean isRecovered;
private final int memberId;
private int votedForMemberId = ClusterMember.NULL_MEMBER_ID;
private int leaderMemberId;
private int serviceAckCount = 0;
private int logSessionId;
private final long sessionTimeoutMs;
private final long heartbeatIntervalMs;
private final long heartbeatTimeoutMs;
private long nextSessionId = 1;
private long baseLogPosition = 0;
private long leadershipTermId = -1;
private long lastRecordingPosition = 0;
private long timeOfLastLogUpdateMs = 0;
private long followerCommitPosition = 0;
private long logRecordingId;
private ReadableCounter logRecordingPosition;
private Counter commitPosition;
private ConsensusModule.State state = ConsensusModule.State.INIT;
private Cluster.Role role;
private ClusterMember[] clusterMembers;
private ClusterMember leaderMember;
private final ClusterMember thisMember;
private long[] rankedPositions;
private final Counter clusterRoleCounter;
private final ClusterMarkFile markFile;
private final AgentInvoker aeronClientInvoker;
private final EpochClock epochClock;
private final CachedEpochClock cachedEpochClock = new CachedEpochClock();
private final Counter moduleState;
private final Counter controlToggle;
private final TimerService timerService;
private final ServiceControlAdapter serviceControlAdapter;
private final ServiceControlPublisher serviceControlPublisher;
private final IngressAdapter ingressAdapter;
private final EgressPublisher egressPublisher;
private final LogPublisher logPublisher;
private LogAdapter logAdapter;
private final MemberStatusAdapter memberStatusAdapter;
private final MemberStatusPublisher memberStatusPublisher = new MemberStatusPublisher();
private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>();
private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>();
private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>();
private final Authenticator authenticator;
private final SessionProxy sessionProxy;
private final Aeron aeron;
private AeronArchive archive;
private final ConsensusModule.Context ctx;
private final UnsafeBuffer tempBuffer = new UnsafeBuffer(new byte[METADATA_LENGTH]);
private final IdleStrategy idleStrategy;
private final LongArrayList failedTimerCancellations = new LongArrayList();
private RecordingLog.RecoveryPlan recoveryPlan;
SequencerAgent(
final ConsensusModule.Context ctx,
final EgressPublisher egressPublisher,
final LogPublisher logPublisher)
{
this.ctx = ctx;
this.aeron = ctx.aeron();
this.epochClock = ctx.epochClock();
this.sessionTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.sessionTimeoutNs());
this.heartbeatIntervalMs = TimeUnit.NANOSECONDS.toMillis(ctx.heartbeatIntervalNs());
this.heartbeatTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.heartbeatTimeoutNs());
this.egressPublisher = egressPublisher;
this.moduleState = ctx.moduleStateCounter();
this.controlToggle = ctx.controlToggleCounter();
this.logPublisher = logPublisher;
this.idleStrategy = ctx.idleStrategy();
this.timerService = new TimerService(this);
this.clusterMembers = ClusterMember.parse(ctx.clusterMembers());
this.sessionProxy = new SessionProxy(egressPublisher);
this.memberId = ctx.clusterMemberId();
this.leaderMemberId = ctx.appointedLeaderId();
this.clusterRoleCounter = ctx.clusterNodeCounter();
this.markFile = ctx.clusterMarkFile();
aeronClientInvoker = ctx.ownsAeronClient() ? ctx.aeron().conductorAgentInvoker() : null;
invokeAeronClient();
rankedPositions = new long[ClusterMember.quorumThreshold(clusterMembers.length)];
role(Cluster.Role.FOLLOWER);
thisMember = clusterMembers[memberId];
final ChannelUri memberStatusUri = ChannelUri.parse(ctx.memberStatusChannel());
memberStatusUri.put(ENDPOINT_PARAM_NAME, thisMember.memberFacingEndpoint());
final int statusStreamId = ctx.memberStatusStreamId();
memberStatusAdapter = new MemberStatusAdapter(
aeron.addSubscription(memberStatusUri.toString(), statusStreamId), this);
ClusterMember.addMemberStatusPublications(clusterMembers, thisMember, memberStatusUri, statusStreamId, aeron);
final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
if (!ingressUri.containsKey(ENDPOINT_PARAM_NAME))
{
ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.clientFacingEndpoint());
}
ingressAdapter = new IngressAdapter(
aeron.addSubscription(ingressUri.toString(), ctx.ingressStreamId()), this, ctx.invalidRequestCounter());
serviceControlAdapter = new ServiceControlAdapter(
aeron.addSubscription(ctx.serviceControlChannel(), ctx.serviceControlStreamId()), this);
serviceControlPublisher = new ServiceControlPublisher(
aeron.addPublication(ctx.serviceControlChannel(), ctx.serviceControlStreamId()));
authenticator = ctx.authenticatorSupplier().newAuthenticator(ctx);
}
public void onClose()
{
CloseHelper.close(archive);
if (!ctx.ownsAeronClient())
{
for (final ClusterSession session : sessionByIdMap.values())
{
session.close();
}
CloseHelper.close(memberStatusAdapter);
ClusterMember.closeMemberPublications(clusterMembers);
logPublisher.disconnect();
CloseHelper.close(ingressAdapter);
CloseHelper.close(serviceControlPublisher);
CloseHelper.close(serviceControlAdapter);
}
}
public void onStart()
{
archive = AeronArchive.connect(ctx.archiveContext());
recoveryPlan = ctx.recordingLog().createRecoveryPlan(archive);
serviceAckCount = 0;
try (Counter ignore = addRecoveryStateCounter(recoveryPlan))
{
if (null != recoveryPlan.snapshotStep)
{
recoverFromSnapshot(recoveryPlan.snapshotStep, archive);
}
awaitServiceAcks();
if (recoveryPlan.termSteps.size() > 0)
{
recoverFromLog(recoveryPlan.termSteps, archive);
}
isRecovered = true;
}
state(ConsensusModule.State.ACTIVE); // TODO: handle suspended case
leadershipTermId++;
if (clusterMembers.length > 1)
{
electLeader();
}
if (memberId == leaderMemberId || clusterMembers.length == 1)
{
becomeLeader();
}
else
{
becomeFollower();
}
final long nowMs = epochClock.time();
cachedEpochClock.update(nowMs);
timeOfLastLogUpdateMs = nowMs;
ctx.recordingLog().appendTerm(logRecordingId, leadershipTermId, baseLogPosition, nowMs, leaderMemberId);
}
public int doWork()
{
int workCount = 0;
boolean isSlowTickCycle = false;
final long nowMs = epochClock.time();
if (cachedEpochClock.time() != nowMs)
{
isSlowTickCycle = true;
cachedEpochClock.update(nowMs);
}
switch (role)
{
case LEADER:
if (ConsensusModule.State.ACTIVE == state)
{
workCount += ingressAdapter.poll();
}
break;
case FOLLOWER:
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
workCount += logAdapter.poll(followerCommitPosition);
}
break;
}
workCount += memberStatusAdapter.poll();
workCount += updateMemberPosition(nowMs);
if (isSlowTickCycle)
{
workCount += slowTickCycle(nowMs);
}
return workCount;
}
public String roleName()
{
return "sequencer";
}
public void onServiceAck(
final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
validateServiceAck(logPosition, leadershipTermId, serviceId, action);
if (++serviceAckCount == ctx.serviceCount())
{
final long termPosition = logPosition - baseLogPosition;
switch (action)
{
case SNAPSHOT:
ctx.snapshotCounter().incrementOrdered();
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
final long nowMs = epochClock.time();
for (final ClusterSession session : sessionByIdMap.values())
{
session.timeOfLastActivityMs(nowMs);
}
break;
case SHUTDOWN:
ctx.snapshotCounter().incrementOrdered();
ctx.recordingLog().commitLeadershipTermPosition(leadershipTermId, termPosition);
state(ConsensusModule.State.CLOSED);
ctx.terminationHook().run();
break;
case ABORT:
ctx.recordingLog().commitLeadershipTermPosition(leadershipTermId, termPosition);
state(ConsensusModule.State.CLOSED);
ctx.terminationHook().run();
break;
}
}
else if (serviceAckCount > ctx.serviceCount())
{
throw new IllegalStateException("Service count exceeded: " + serviceAckCount);
}
}
public void onSessionConnect(
final long correlationId,
final int responseStreamId,
final String responseChannel,
final byte[] credentialData)
{
final long nowMs = cachedEpochClock.time();
final long sessionId = nextSessionId++;
final ClusterSession session = new ClusterSession(sessionId, responseStreamId, responseChannel);
session.connect(aeron);
session.lastActivity(nowMs, correlationId);
if (pendingSessions.size() + sessionByIdMap.size() < ctx.maxConcurrentSessions())
{
authenticator.onConnectRequest(sessionId, credentialData, nowMs);
pendingSessions.add(session);
}
else
{
rejectedSessions.add(session);
}
}
public void onSessionClose(final long clusterSessionId)
{
closeSession(clusterSessionId, CloseReason.USER_ACTION);
}
public ControlledFragmentAssembler.Action onSessionMessage(
final DirectBuffer buffer,
final int offset,
final int length,
final long clusterSessionId,
final long correlationId)
{
final long nowMs = cachedEpochClock.time();
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null == session || session.state() == CLOSED)
{
return ControlledFragmentHandler.Action.CONTINUE;
}
if (session.state() == OPEN && logPublisher.appendMessage(buffer, offset, length, nowMs))
{
session.lastActivity(nowMs, correlationId);
return ControlledFragmentHandler.Action.CONTINUE;
}
return ControlledFragmentHandler.Action.ABORT;
}
public void onSessionKeepAlive(final long clusterSessionId)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.timeOfLastActivityMs(cachedEpochClock.time());
}
}
public void onChallengeResponse(final long correlationId, final long clusterSessionId, final byte[] credentialData)
{
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.id() == clusterSessionId && session.state() == CHALLENGED)
{
final long nowMs = cachedEpochClock.time();
session.lastActivity(nowMs, correlationId);
authenticator.onChallengeResponse(clusterSessionId, credentialData, nowMs);
break;
}
}
}
public void onAdminQuery(final long correlationId, final long clusterSessionId, final AdminQueryType queryType)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
switch (queryType)
{
case ENDPOINTS:
final ChannelUri archiveChannelUri = ChannelUri.parse(ctx.archiveContext().controlRequestChannel());
final String endpointsDetail =
"id=" + Long.toString(thisMember.id()) +
",memberStatus=" + thisMember.memberFacingEndpoint() +
",log=" + thisMember.memberFacingEndpoint() +
",archive=" + archiveChannelUri.get(ENDPOINT_PARAM_NAME);
final long nowMs = cachedEpochClock.time();
session.lastActivity(nowMs, correlationId);
session.adminQueryResponseDetail(endpointsDetail);
if (egressPublisher.sendEvent(session, EventCode.OK, session.adminQueryResponseDetail()))
{
session.adminQueryResponseDetail(null);
}
break;
case RECORDING_LOG: // TODO: or should this really be recoveryPlan?
// TODO: send recordingLog as a byte[]
break;
}
}
}
public boolean onTimerEvent(final long correlationId, final long nowMs)
{
return logPublisher.appendTimerEvent(correlationId, nowMs);
}
public void onScheduleTimer(final long correlationId, final long deadlineMs)
{
timerService.scheduleTimer(correlationId, deadlineMs);
}
public void onCancelTimer(final long correlationId)
{
timerService.cancelTimer(correlationId);
}
public void onServiceCloseSession(final long clusterSessionId)
{
if (Cluster.Role.LEADER == role)
{
closeSession(clusterSessionId, CloseReason.SERVICE_ACTION);
}
}
void state(final ConsensusModule.State state)
{
this.state = state;
moduleState.set(state.code());
}
void role(final Cluster.Role role)
{
this.role = role;
clusterRoleCounter.setOrdered(role.code());
}
void logRecordingPositionCounter(final ReadableCounter logRecordingPosition)
{
this.logRecordingPosition = logRecordingPosition;
}
void commitPositionCounter(final Counter commitPosition)
{
this.commitPosition = commitPosition;
}
@SuppressWarnings("unused")
void onReplaySessionMessage(
final long correlationId,
final long clusterSessionId,
final long timestamp,
final DirectBuffer buffer,
final int offset,
final int length,
final Header header)
{
cachedEpochClock.update(timestamp);
sessionByIdMap.get(clusterSessionId).lastActivity(timestamp, correlationId);
}
void onReplayTimerEvent(final long correlationId, final long timestamp)
{
cachedEpochClock.update(timestamp);
if (!timerService.cancelTimer(correlationId))
{
failedTimerCancellations.addLong(correlationId);
}
}
void onReplaySessionOpen(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final int responseStreamId,
final String responseChannel)
{
cachedEpochClock.update(timestamp);
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onLoadSession(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final CloseReason closeReason,
final int responseStreamId,
final String responseChannel)
{
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.closeReason(closeReason);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
if (CloseReason.NULL_VAL != closeReason)
{
session.close();
}
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
@SuppressWarnings("unused")
void onReplaySessionClose(
final long correlationId,
final long clusterSessionId,
final long timestamp,
final CloseReason closeReason)
{
cachedEpochClock.update(timestamp);
sessionByIdMap.remove(clusterSessionId).close();
}
@SuppressWarnings("unused")
void onReplayClusterAction(
final long logPosition, final long leadershipTermId, final long timestamp, final ClusterAction action)
{
cachedEpochClock.update(timestamp);
final long termPosition = logPosition - baseLogPosition;
switch (action)
{
case SUSPEND:
state(ConsensusModule.State.SUSPENDED);
break;
case RESUME:
state(ConsensusModule.State.ACTIVE);
break;
case SNAPSHOT:
if (isRecovered)
{
serviceAckCount = 0;
state(ConsensusModule.State.SNAPSHOT);
takeSnapshot(timestamp, termPosition);
}
break;
case SHUTDOWN:
if (isRecovered)
{
serviceAckCount = 0;
state(ConsensusModule.State.SHUTDOWN);
takeSnapshot(timestamp, termPosition);
}
break;
case ABORT:
if (isRecovered)
{
serviceAckCount = 0;
state(ConsensusModule.State.ABORT);
}
break;
}
}
void onReloadState(final long nextSessionId)
{
this.nextSessionId = nextSessionId;
}
void onRequestVote(
final long candidateTermId,
final long lastBaseLogPosition,
final long lastTermPosition,
final int candidateId)
{
if (Cluster.Role.FOLLOWER == role &&
candidateTermId == leadershipTermId &&
lastBaseLogPosition == recoveryPlan.lastLogPosition)
{
final boolean vote = lastTermPosition >= recoveryPlan.lastTermPositionAppended;
sendVote(candidateTermId, lastBaseLogPosition, lastTermPosition, candidateId, vote);
if (!vote)
{
// TODO: become candidate in new election
throw new IllegalStateException("Invalid member for cluster leader: " + candidateId);
}
else
{
votedForMemberId = candidateId;
if (recoveryPlan.lastTermPositionAppended < lastTermPosition)
{
// TODO: need to catch up with leader
}
}
}
else
{
sendVote(candidateTermId, lastBaseLogPosition, lastTermPosition, candidateId, false);
}
}
void onVote(
final long candidateTermId,
final long lastBaseLogPosition,
final long lastTermPosition,
final int candidateMemberId,
final int followerMemberId,
final boolean vote)
{
if (Cluster.Role.FOLLOWER == role &&
candidateTermId == leadershipTermId &&
lastBaseLogPosition == recoveryPlan.lastLogPosition &&
lastTermPosition == recoveryPlan.lastTermPositionAppended &&
candidateMemberId == memberId)
{
if (vote)
{
clusterMembers[followerMemberId].votedForId(candidateMemberId);
}
else
{
throw new IllegalStateException("Invalid member for cluster leader: " + candidateMemberId);
}
}
}
void onAppendedPosition(final long termPosition, final long leadershipTermId, final int followerMemberId)
{
if (leadershipTermId == this.leadershipTermId)
{
clusterMembers[followerMemberId].termPosition(termPosition);
}
}
void onCommitPosition(
final long termPosition, final long leadershipTermId, final int leaderMemberId, final int logSessionId)
{
if (leadershipTermId == this.leadershipTermId)
{
if (leaderMemberId != this.leaderMemberId)
{
throw new IllegalStateException("Commit position not for current leader: expected=" +
this.leaderMemberId + " received=" + leaderMemberId);
}
if (0 == termPosition && leaderMemberId == votedForMemberId && this.logSessionId != logSessionId)
{
this.logSessionId = logSessionId;
}
timeOfLastLogUpdateMs = cachedEpochClock.time();
followerCommitPosition = termPosition;
}
}
private void closeSession(final long clusterSessionId, final CloseReason closeReason)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.closeReason(closeReason);
session.close();
if (appendClosedSession(session, cachedEpochClock.time()))
{
sessionByIdMap.remove(clusterSessionId);
}
}
}
private int slowTickCycle(final long nowMs)
{
int workCount = 0;
markFile.updateActivityTimestamp(nowMs);
workCount += invokeAeronClient();
workCount += serviceControlAdapter.poll();
if (Cluster.Role.LEADER == role)
{
workCount += checkControlToggle(nowMs);
if (ConsensusModule.State.ACTIVE == state)
{
workCount += processPendingSessions(pendingSessions, nowMs);
workCount += checkSessions(sessionByIdMap, nowMs);
workCount += processRejectedSessions(rejectedSessions, nowMs);
workCount += timerService.poll(nowMs);
}
}
if (null != archive)
{
archive.checkForErrorResponse();
}
return workCount;
}
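// Applies an operator control toggle (suspend, resume, snapshot, shutdown, abort) by appending
// the corresponding cluster action to the log and updating the consensus module state.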
private int checkControlToggle(final long nowMs)
{
switch (ClusterControl.ToggleState.get(controlToggle))
{
case SUSPEND:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND, nowMs))
{
state(ConsensusModule.State.SUSPENDED);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case RESUME:
if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME, nowMs))
{
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case SNAPSHOT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT, nowMs))
{
state(ConsensusModule.State.SNAPSHOT);
takeSnapshot(nowMs, logPublisher.position());
}
break;
case SHUTDOWN:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SHUTDOWN, nowMs))
{
state(ConsensusModule.State.SHUTDOWN);
takeSnapshot(nowMs, logPublisher.position());
}
break;
case ABORT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.ABORT, nowMs))
{
state(ConsensusModule.State.ABORT);
}
break;
default:
return 0;
}
return 1;
}
private void sendVote(
final long candidateTermId,
final long lastBaseLogPosition,
final long lastTermPosition,
final int candidateId,
final boolean vote)
{
idleStrategy.reset();
while (!memberStatusPublisher.vote(
clusterMembers[candidateId].publication(),
candidateTermId,
lastBaseLogPosition,
lastTermPosition,
candidateId,
memberId,
vote))
{
idle();
}
}
private boolean appendAction(final ClusterAction action, final long nowMs)
{
final long position = baseLogPosition +
logPublisher.position() +
MessageHeaderEncoder.ENCODED_LENGTH +
ClusterActionRequestEncoder.BLOCK_LENGTH;
return logPublisher.appendClusterAction(action, leadershipTermId, position, nowMs);
}
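// Walks pending sessions through authentication: authenticated sessions are appended to the
// log and moved to the active session map, rejected sessions are queued for an error event,
// and sessions idle beyond the session timeout are closed.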
private int processPendingSessions(final ArrayList<ClusterSession> pendingSessions, final long nowMs)
{
int workCount = 0;
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.state() == INIT || session.state() == CONNECTED)
{
if (session.isResponsePublicationConnected())
{
session.state(CONNECTED);
authenticator.onProcessConnectedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == CHALLENGED)
{
if (session.isResponsePublicationConnected())
{
authenticator.onProcessChallengedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == AUTHENTICATED)
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.timeOfLastActivityMs(nowMs);
sessionByIdMap.put(session.id(), session);
appendConnectedSession(session, nowMs);
workCount += 1;
}
else if (session.state() == REJECTED)
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
rejectedSessions.add(session);
}
else if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.close();
}
}
return workCount;
}
private int processRejectedSessions(final ArrayList<ClusterSession> rejectedSessions, final long nowMs)
{
int workCount = 0;
for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = rejectedSessions.get(i);
String detail = ConsensusModule.Configuration.SESSION_LIMIT_MSG;
EventCode eventCode = EventCode.ERROR;
if (session.state() == REJECTED)
{
detail = ConsensusModule.Configuration.SESSION_REJECTED_MSG;
eventCode = EventCode.AUTHENTICATION_REJECTED;
}
if (egressPublisher.sendEvent(session, eventCode, detail) ||
nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--);
session.close();
workCount++;
}
}
return workCount;
}
private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowMs)
{
int workCount = 0;
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
final ClusterSession.State state = session.state();
if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
switch (state)
{
case OPEN:
egressPublisher.sendEvent(session, EventCode.ERROR, SESSION_TIMEOUT_MSG);
session.closeReason(CloseReason.TIMEOUT);
session.close();
if (appendClosedSession(session, nowMs))
{
i.remove();
}
break;
case CLOSED:
if (appendClosedSession(session, nowMs))
{
session.close();
i.remove();
}
break;
default:
session.close();
i.remove();
}
workCount += 1;
}
else if (state == CONNECTED)
{
appendConnectedSession(session, nowMs);
workCount += 1;
}
else if (state == OPEN && session.adminQueryResponseDetail() != null)
{
if (egressPublisher.sendEvent(session, EventCode.OK, session.adminQueryResponseDetail()))
{
session.adminQueryResponseDetail(null);
}
}
}
return workCount;
}
private void appendConnectedSession(final ClusterSession session, final long nowMs)
{
final long resultingPosition = logPublisher.appendConnectedSession(session, nowMs);
if (resultingPosition > 0)
{
session.open(resultingPosition);
}
}
private boolean appendClosedSession(final ClusterSession session, final long nowMs)
{
if (logPublisher.appendClosedSession(session, nowMs))
{
session.close();
return true;
}
return false;
}
private void electLeader()
{
awaitConnectedMembers();
if (ctx.appointedLeaderId() == memberId)
{
role(Cluster.Role.CANDIDATE);
ClusterMember.becomeCandidate(clusterMembers, memberId);
votedForMemberId = memberId;
for (final ClusterMember member : clusterMembers)
{
idleStrategy.reset();
while (!memberStatusPublisher.requestVote(
member.publication(),
leadershipTermId,
recoveryPlan.lastLogPosition,
recoveryPlan.lastTermPositionAppended,
memberId))
{
idle();
}
}
do
{
idle(memberStatusAdapter.poll());
}
while (ClusterMember.awaitingVotes(clusterMembers));
leaderMemberId = memberId;
leaderMember = thisMember;
}
else
{
votedForMemberId = ClusterMember.NULL_MEMBER_ID;
do
{
idle(memberStatusAdapter.poll());
}
while (ClusterMember.NULL_MEMBER_ID == leaderMemberId);
}
}
private void awaitConnectedMembers()
{
idleStrategy.reset();
while (true)
{
if (ClusterMember.arePublicationsConnected(clusterMembers))
{
break;
}
idle();
}
}
private void becomeLeader()
{
updateMemberDetails(leaderMemberId);
role(Cluster.Role.LEADER);
final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
final Publication publication = aeron.addExclusivePublication(ctx.logChannel(), ctx.logStreamId());
if (!channelUri.containsKey(CommonContext.ENDPOINT_PARAM_NAME))
{
final ChannelUriStringBuilder builder = new ChannelUriStringBuilder().media("udp");
for (final ClusterMember member : clusterMembers)
{
if (member.id() != memberId)
{
publication.addDestination(builder.endpoint(member.logEndpoint()).build());
}
}
}
logAdapter = null;
logPublisher.connect(publication);
logSessionId = publication.sessionId();
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
final String recordingChannel = channelUri.toString();
archive.startRecording(recordingChannel, ctx.logStreamId(), SourceLocation.LOCAL);
createPositionCounters();
awaitServicesReady(channelUri, true);
awaitFollowersReady();
final long nowMs = epochClock.time();
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() != CLOSED)
{
session.connect(aeron);
session.timeOfLastActivityMs(nowMs);
}
}
}
private void becomeFollower()
{
leaderMember = clusterMembers[leaderMemberId];
followerCommitPosition = NULL_POSITION;
updateMemberDetails(leaderMemberId);
role(Cluster.Role.FOLLOWER);
while (NULL_POSITION == followerCommitPosition)
{
final int fragments = memberStatusAdapter.poll();
idle(fragments);
}
final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
channelUri.put(CommonContext.ENDPOINT_PARAM_NAME, thisMember.logEndpoint());
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
final String logChannel = channelUri.toString();
final int streamId = ctx.logStreamId();
archive.startRecording(logChannel, streamId, SourceLocation.REMOTE);
logAdapter = new LogAdapter(awaitImage(logSessionId, aeron.addSubscription(logChannel, streamId)), this);
createPositionCounters();
awaitServicesReady(channelUri, false);
}
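// Leader waits until the members have reported a position for the new term, re-publishing the
// commit position at each heartbeat interval while waiting.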
private void awaitFollowersReady()
{
ClusterMember.resetTermPositions(clusterMembers, -1);
clusterMembers[memberId].termPosition(logRecordingPosition.get());
do
{
final long nowMs = epochClock.time();
if (nowMs > (timeOfLastLogUpdateMs + heartbeatIntervalMs))
{
timeOfLastLogUpdateMs = nowMs;
for (final ClusterMember member : clusterMembers)
{
if (member != thisMember)
{
memberStatusPublisher.commitPosition(
member.publication(), baseLogPosition, leadershipTermId, memberId, logSessionId);
}
}
}
idle(memberStatusAdapter.poll());
}
while (!ClusterMember.hasReachedPosition(clusterMembers, 0));
}
private void createPositionCounters()
{
final CountersReader counters = aeron.countersReader();
final int recordingCounterId = awaitRecordingCounter(counters, logSessionId);
logRecordingPosition = new ReadableCounter(counters, recordingCounterId);
logRecordingId = RecordingPos.getRecordingId(counters, logRecordingPosition.counterId());
commitPosition = CommitPos.allocate(
aeron, tempBuffer, logRecordingId, baseLogPosition, leadershipTermId, logSessionId);
}
private void awaitServicesReady(final ChannelUri channelUri, final boolean isLeader)
{
serviceAckCount = 0;
final String channel = isLeader ? channelUri.prefix(SPY_QUALIFIER).toString() : channelUri.toString();
serviceControlPublisher.joinLog(
leadershipTermId, commitPosition.id(), logSessionId, ctx.logStreamId(), channel);
awaitServiceAcks();
}
private void updateMemberDetails(final int leaderMemberId)
{
for (final ClusterMember clusterMember : clusterMembers)
{
clusterMember.isLeader(clusterMember.id() == leaderMemberId);
}
updateClusterMemberDetails(clusterMembers);
}
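// Replays the snapshot recording referenced by the recovery plan and feeds it through a
// SnapshotLoader to restore consensus module state before any log terms are replayed.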
private void recoverFromSnapshot(final RecordingLog.ReplayStep snapshotStep, final AeronArchive archive)
{
final RecordingLog.Entry snapshot = snapshotStep.entry;
cachedEpochClock.update(snapshot.timestamp);
baseLogPosition = snapshot.logPosition;
leadershipTermId = snapshot.leadershipTermId;
final long recordingId = snapshot.recordingId;
final RecordingExtent recordingExtent = new RecordingExtent();
if (0 == archive.listRecording(recordingId, recordingExtent))
{
throw new IllegalStateException("Could not find recordingId: " + recordingId);
}
final String channel = ctx.replayChannel();
final int streamId = ctx.replayStreamId();
final long length = recordingExtent.stopPosition - recordingExtent.startPosition;
final int sessionId = (int)archive.startReplay(recordingId, 0, length, channel, streamId);
final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId);
try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId))
{
final Image image = awaitImage(sessionId, subscription);
final SnapshotLoader snapshotLoader = new SnapshotLoader(image, this);
while (true)
{
final int fragments = snapshotLoader.poll();
if (fragments == 0)
{
if (snapshotLoader.isDone())
{
break;
}
if (image.isClosed())
{
throw new IllegalStateException("Snapshot ended unexpectedly");
}
}
idle(fragments);
}
}
}
private Image awaitImage(final int sessionId, final Subscription subscription)
{
idleStrategy.reset();
Image image;
while ((image = subscription.imageBySessionId(sessionId)) == null)
{
idle();
}
return image;
}
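// Replays each recorded leadership term in order: services are told to join the replayed log,
// the term is replayed to its stop position, and baseLogPosition advances as each term completes.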
private void recoverFromLog(final List<RecordingLog.ReplayStep> steps, final AeronArchive archive)
{
final int streamId = ctx.replayStreamId();
final ChannelUri channelUri = ChannelUri.parse(ctx.replayChannel());
for (int i = 0, size = steps.size(); i < size; i++)
{
final RecordingLog.ReplayStep step = steps.get(i);
final RecordingLog.Entry entry = step.entry;
final long recordingId = entry.recordingId;
final long startPosition = step.recordingStartPosition;
final long stopPosition = step.recordingStopPosition;
final long length = NULL_POSITION == stopPosition ? Long.MAX_VALUE : stopPosition - startPosition;
final long logPosition = entry.logPosition;
if (logPosition != baseLogPosition)
{
throw new IllegalStateException("base position for log not as expected: expected " +
baseLogPosition + " actual is " + logPosition + ", " + step);
}
leadershipTermId = entry.leadershipTermId;
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(i));
final String channel = channelUri.toString();
try (Counter counter = CommitPos.allocate(
aeron, tempBuffer, recordingId, logPosition, leadershipTermId, i);
Subscription subscription = aeron.addSubscription(channel, streamId))
{
counter.setOrdered(stopPosition);
serviceAckCount = 0;
serviceControlPublisher.joinLog(leadershipTermId, counter.id(), i, streamId, channel);
awaitServiceAcks();
final int sessionId = (int)archive.startReplay(recordingId, startPosition, length, channel, streamId);
if (i != sessionId)
{
throw new IllegalStateException("Session id not for iteration: " + sessionId);
}
final Image image = awaitImage(sessionId, subscription);
serviceAckCount = 0;
replayTerm(image, stopPosition);
awaitServiceAcks();
final long termPosition = image.position();
if (step.entry.termPosition < termPosition)
{
ctx.recordingLog().commitLeadershipTermPosition(leadershipTermId, termPosition);
}
baseLogPosition += termPosition;
failedTimerCancellations.forEachOrderedLong(timerService::cancelTimer);
failedTimerCancellations.clear();
}
}
failedTimerCancellations.trimToSize();
}
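// Allocates a RecoveryState counter describing the snapshot (if any) and the number of terms to replay during recovery.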
private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan)
{
final int termCount = plan.termSteps.size();
final RecordingLog.ReplayStep snapshotStep = plan.snapshotStep;
if (null != snapshotStep)
{
final RecordingLog.Entry snapshot = snapshotStep.entry;
return RecoveryState.allocate(
aeron, tempBuffer, snapshot.leadershipTermId, snapshot.termPosition, snapshot.timestamp, termCount);
}
return RecoveryState.allocate(aeron, tempBuffer, leadershipTermId, NULL_POSITION, 0, termCount);
}
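// Polls the service control channel until acknowledgements have been received from all services.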
private void awaitServiceAcks()
{
while (true)
{
final int fragmentsRead = serviceControlAdapter.poll();
if (serviceAckCount >= ctx.serviceCount())
{
break;
}
idle(fragmentsRead);
}
}
private void validateServiceAck(
final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
final long currentLogPosition = baseLogPosition + currentTermPosition();
if (logPosition != currentLogPosition || leadershipTermId != this.leadershipTermId)
{
throw new IllegalStateException("Invalid log state:" +
" serviceId=" + serviceId +
", logPosition=" + logPosition + " current is " + currentLogPosition +
", leadershipTermId=" + leadershipTermId + " current is " + this.leadershipTermId);
}
if (!state.isValid(action))
{
throw new IllegalStateException("Invalid action ack for state " + state + " action " + action);
}
}
private long currentTermPosition()
{
return null != logAdapter ? logAdapter.position() : logPublisher.position();
}
private void updateClusterMemberDetails(final ClusterMember[] members)
{
int leaderIndex = 0;
for (int i = 0, length = members.length; i < length; i++)
{
if (members[i].isLeader())
{
leaderIndex = i;
break;
}
}
final StringBuilder builder = new StringBuilder(100);
builder.append(members[leaderIndex].clientFacingEndpoint());
for (int i = 0, length = members.length; i < length; i++)
{
if (i != leaderIndex)
{
builder.append(',').append(members[i].clientFacingEndpoint());
}
}
sessionProxy.memberEndpointsDetail(builder.toString());
}
private int updateMemberPosition(final long nowMs)
{
int workCount = 0;
switch (role)
{
case LEADER:
{
thisMember.termPosition(logRecordingPosition.get());
final long position = ClusterMember.quorumPosition(clusterMembers, rankedPositions);
if (position > commitPosition.getWeak() || nowMs >= (timeOfLastLogUpdateMs + heartbeatIntervalMs))
{
for (final ClusterMember member : clusterMembers)
{
if (member != thisMember)
{
memberStatusPublisher.commitPosition(
member.publication(), position, leadershipTermId, memberId, logSessionId);
}
}
commitPosition.setOrdered(position);
timeOfLastLogUpdateMs = nowMs;
workCount = 1;
}
break;
}
case FOLLOWER:
{
final long recordingPosition = logRecordingPosition.get();
if (recordingPosition != lastRecordingPosition)
{
final Publication publication = leaderMember.publication();
if (memberStatusPublisher.appendedPosition(
publication, recordingPosition, leadershipTermId, memberId))
{
lastRecordingPosition = recordingPosition;
}
workCount = 1;
}
commitPosition.proposeMaxOrdered(logAdapter.position());
if (nowMs >= (timeOfLastLogUpdateMs + heartbeatTimeoutMs))
{
throw new AgentTerminationException("No heartbeat detected from cluster leader");
}
break;
}
}
return workCount;
}
private void idle()
{
checkInterruptedStatus();
invokeAeronClient();
idleStrategy.idle();
}
private void idle(final int workCount)
{
checkInterruptedStatus();
invokeAeronClient();
idleStrategy.idle(workCount);
}
private static void checkInterruptedStatus()
{
if (Thread.currentThread().isInterrupted())
{
throw new RuntimeException("Unexpected interrupt");
}
}
private int invokeAeronClient()
{
int workCount = 0;
if (null != aeronClientInvoker)
{
workCount += aeronClientInvoker.invoke();
}
return workCount;
}
private void takeSnapshot(final long timestampMs, final long termPosition)
{
final long recordingId;
final long logPosition = baseLogPosition + termPosition;
final String channel = ctx.snapshotChannel();
final int streamId = ctx.snapshotStreamId();
try (Publication publication = archive.addRecordedExclusivePublication(channel, streamId))
{
try
{
final CountersReader counters = aeron.countersReader();
final int counterId = awaitRecordingCounter(counters, publication.sessionId());
recordingId = RecordingPos.getRecordingId(counters, counterId);
snapshotState(publication, logPosition, leadershipTermId);
awaitRecordingComplete(recordingId, publication.position(), counters, counterId);
}
finally
{
archive.stopRecording(publication);
}
}
ctx.recordingLog().appendSnapshot(recordingId, leadershipTermId, baseLogPosition, termPosition, timestampMs);
}
private void awaitRecordingComplete(
final long recordingId, final long completePosition, final CountersReader counters, final int counterId)
{
idleStrategy.reset();
do
{
idle();
if (!RecordingPos.isActive(counters, counterId, recordingId))
{
throw new IllegalStateException("Recording has stopped unexpectedly: " + recordingId);
}
}
while (counters.getCounterValue(counterId) < completePosition);
}
private int awaitRecordingCounter(final CountersReader counters, final int sessionId)
{
idleStrategy.reset();
int counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
while (CountersReader.NULL_COUNTER_ID == counterId)
{
idle();
counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
}
return counterId;
}
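// Writes the open client sessions, pending timers, and sequencer state to the snapshot publication between begin and end markers.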
private void snapshotState(final Publication publication, final long logPosition, final long leadershipTermId)
{
final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker(
publication, idleStrategy, aeronClientInvoker);
snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
snapshotTaker.snapshotSession(session);
}
}
invokeAeronClient();
timerService.snapshot(snapshotTaker);
snapshotTaker.sequencerState(nextSessionId);
snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
}
private void replayTerm(final Image image, final long finalTermPosition)
{
logAdapter = new LogAdapter(image, this);
while (true)
{
final int fragments = logAdapter.poll(finalTermPosition);
if (fragments == 0)
{
if (image.isClosed())
{
if (!image.isEndOfStream())
{
throw new IllegalStateException("Unexpected close");
}
break;
}
}
idle(fragments);
}
}
}
|
aeron-cluster/src/main/java/io/aeron/cluster/SequencerAgent.java
|
/*
* Copyright 2017 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.aeron.cluster;
import io.aeron.*;
import io.aeron.archive.client.AeronArchive;
import io.aeron.archive.codecs.SourceLocation;
import io.aeron.archive.status.RecordingPos;
import io.aeron.cluster.codecs.*;
import io.aeron.cluster.service.*;
import io.aeron.logbuffer.ControlledFragmentHandler;
import io.aeron.logbuffer.Header;
import io.aeron.status.ReadableCounter;
import org.agrona.*;
import org.agrona.collections.ArrayListUtil;
import org.agrona.collections.Long2ObjectHashMap;
import org.agrona.collections.LongArrayList;
import org.agrona.concurrent.*;
import org.agrona.concurrent.status.CountersReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static io.aeron.ChannelUri.SPY_QUALIFIER;
import static io.aeron.CommonContext.ENDPOINT_PARAM_NAME;
import static io.aeron.archive.client.AeronArchive.NULL_POSITION;
import static io.aeron.cluster.ClusterSession.State.*;
import static io.aeron.cluster.ConsensusModule.Configuration.SESSION_TIMEOUT_MSG;
import static io.aeron.cluster.ConsensusModule.SNAPSHOT_TYPE_ID;
import static org.agrona.concurrent.status.CountersReader.METADATA_LENGTH;
class SequencerAgent implements Agent, ServiceControlListener
{
private boolean isRecovered;
private final int memberId;
private int votedForMemberId = ClusterMember.NULL_MEMBER_ID;
private int leaderMemberId;
private int serviceAckCount = 0;
private int logSessionId;
private final long sessionTimeoutMs;
private final long heartbeatIntervalMs;
private final long heartbeatTimeoutMs;
private long nextSessionId = 1;
private long baseLogPosition = 0;
private long leadershipTermId = -1;
private long lastRecordingPosition = 0;
private long timeOfLastLogUpdateMs = 0;
private long followerCommitPosition = 0;
private long logRecordingId;
private ReadableCounter logRecordingPosition;
private Counter commitPosition;
private ConsensusModule.State state = ConsensusModule.State.INIT;
private Cluster.Role role;
private ClusterMember[] clusterMembers;
private ClusterMember leaderMember;
private final ClusterMember thisMember;
private long[] rankedPositions;
private final Counter clusterRoleCounter;
private final ClusterMarkFile markFile;
private final AgentInvoker aeronClientInvoker;
private final EpochClock epochClock;
private final CachedEpochClock cachedEpochClock = new CachedEpochClock();
private final Counter moduleState;
private final Counter controlToggle;
private final TimerService timerService;
private final ServiceControlAdapter serviceControlAdapter;
private final ServiceControlPublisher serviceControlPublisher;
private final IngressAdapter ingressAdapter;
private final EgressPublisher egressPublisher;
private final LogPublisher logPublisher;
private LogAdapter logAdapter;
private final MemberStatusAdapter memberStatusAdapter;
private final MemberStatusPublisher memberStatusPublisher = new MemberStatusPublisher();
private final Long2ObjectHashMap<ClusterSession> sessionByIdMap = new Long2ObjectHashMap<>();
private final ArrayList<ClusterSession> pendingSessions = new ArrayList<>();
private final ArrayList<ClusterSession> rejectedSessions = new ArrayList<>();
private final Authenticator authenticator;
private final SessionProxy sessionProxy;
private final Aeron aeron;
private AeronArchive archive;
private final ConsensusModule.Context ctx;
private final UnsafeBuffer tempBuffer = new UnsafeBuffer(new byte[METADATA_LENGTH]);
private final IdleStrategy idleStrategy;
private final LongArrayList failedTimerCancellations = new LongArrayList();
private RecordingLog.RecoveryPlan recoveryPlan;
SequencerAgent(
final ConsensusModule.Context ctx,
final EgressPublisher egressPublisher,
final LogPublisher logPublisher)
{
this.ctx = ctx;
this.aeron = ctx.aeron();
this.epochClock = ctx.epochClock();
this.sessionTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.sessionTimeoutNs());
this.heartbeatIntervalMs = TimeUnit.NANOSECONDS.toMillis(ctx.heartbeatIntervalNs());
this.heartbeatTimeoutMs = TimeUnit.NANOSECONDS.toMillis(ctx.heartbeatTimeoutNs());
this.egressPublisher = egressPublisher;
this.moduleState = ctx.moduleStateCounter();
this.controlToggle = ctx.controlToggleCounter();
this.logPublisher = logPublisher;
this.idleStrategy = ctx.idleStrategy();
this.timerService = new TimerService(this);
this.clusterMembers = ClusterMember.parse(ctx.clusterMembers());
this.sessionProxy = new SessionProxy(egressPublisher);
this.memberId = ctx.clusterMemberId();
this.leaderMemberId = ctx.appointedLeaderId();
this.clusterRoleCounter = ctx.clusterNodeCounter();
this.markFile = ctx.clusterMarkFile();
aeronClientInvoker = ctx.ownsAeronClient() ? ctx.aeron().conductorAgentInvoker() : null;
invokeAeronClient();
rankedPositions = new long[ClusterMember.quorumThreshold(clusterMembers.length)];
role(Cluster.Role.FOLLOWER);
thisMember = clusterMembers[memberId];
final ChannelUri memberStatusUri = ChannelUri.parse(ctx.memberStatusChannel());
memberStatusUri.put(ENDPOINT_PARAM_NAME, thisMember.memberFacingEndpoint());
final int statusStreamId = ctx.memberStatusStreamId();
memberStatusAdapter = new MemberStatusAdapter(
aeron.addSubscription(memberStatusUri.toString(), statusStreamId), this);
ClusterMember.addMemberStatusPublications(clusterMembers, thisMember, memberStatusUri, statusStreamId, aeron);
final ChannelUri ingressUri = ChannelUri.parse(ctx.ingressChannel());
if (!ingressUri.containsKey(ENDPOINT_PARAM_NAME))
{
ingressUri.put(ENDPOINT_PARAM_NAME, thisMember.clientFacingEndpoint());
}
ingressAdapter = new IngressAdapter(
aeron.addSubscription(ingressUri.toString(), ctx.ingressStreamId()), this, ctx.invalidRequestCounter());
serviceControlAdapter = new ServiceControlAdapter(
aeron.addSubscription(ctx.serviceControlChannel(), ctx.serviceControlStreamId()), this);
serviceControlPublisher = new ServiceControlPublisher(
aeron.addPublication(ctx.serviceControlChannel(), ctx.serviceControlStreamId()));
authenticator = ctx.authenticatorSupplier().newAuthenticator(ctx);
}
public void onClose()
{
CloseHelper.close(archive);
if (!ctx.ownsAeronClient())
{
for (final ClusterSession session : sessionByIdMap.values())
{
session.close();
}
CloseHelper.close(memberStatusAdapter);
ClusterMember.closeMemberPublications(clusterMembers);
logPublisher.disconnect();
CloseHelper.close(ingressAdapter);
CloseHelper.close(serviceControlPublisher);
CloseHelper.close(serviceControlAdapter);
}
}
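// Start-up sequence: connect to the archive, recover from the latest snapshot and recorded terms, then elect a leader and assume the leader or follower role before appending the new term to the recording log.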
public void onStart()
{
archive = AeronArchive.connect(ctx.archiveContext());
recoveryPlan = ctx.recordingLog().createRecoveryPlan(archive);
serviceAckCount = 0;
try (Counter ignore = addRecoveryStateCounter(recoveryPlan))
{
if (null != recoveryPlan.snapshotStep)
{
recoverFromSnapshot(recoveryPlan.snapshotStep, archive);
}
awaitServiceAcks();
if (recoveryPlan.termSteps.size() > 0)
{
recoverFromLog(recoveryPlan.termSteps, archive);
}
isRecovered = true;
}
state(ConsensusModule.State.ACTIVE); // TODO: handle suspended case
leadershipTermId++;
if (clusterMembers.length > 1)
{
electLeader();
}
if (memberId == leaderMemberId || clusterMembers.length == 1)
{
becomeLeader();
}
else
{
becomeFollower();
}
final long nowMs = epochClock.time();
cachedEpochClock.update(nowMs);
timeOfLastLogUpdateMs = nowMs;
ctx.recordingLog().appendTerm(logRecordingId, leadershipTermId, baseLogPosition, nowMs, leaderMemberId);
}
public int doWork()
{
int workCount = 0;
boolean isSlowTickCycle = false;
final long nowMs = epochClock.time();
if (cachedEpochClock.time() != nowMs)
{
isSlowTickCycle = true;
cachedEpochClock.update(nowMs);
}
switch (role)
{
case LEADER:
if (ConsensusModule.State.ACTIVE == state)
{
workCount += ingressAdapter.poll();
}
break;
case FOLLOWER:
if (ConsensusModule.State.ACTIVE == state || ConsensusModule.State.SUSPENDED == state)
{
workCount += logAdapter.poll(followerCommitPosition);
}
break;
}
workCount += memberStatusAdapter.poll();
workCount += updateMemberPosition(nowMs);
if (isSlowTickCycle)
{
workCount += slowTickCycle(nowMs);
}
return workCount;
}
public String roleName()
{
return "sequencer";
}
public void onServiceAck(
final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
validateServiceAck(logPosition, leadershipTermId, serviceId, action);
if (++serviceAckCount == ctx.serviceCount())
{
final long termPosition = logPosition - baseLogPosition;
switch (action)
{
case SNAPSHOT:
ctx.snapshotCounter().incrementOrdered();
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
final long nowMs = epochClock.time();
for (final ClusterSession session : sessionByIdMap.values())
{
session.timeOfLastActivityMs(nowMs);
}
break;
case SHUTDOWN:
ctx.snapshotCounter().incrementOrdered();
ctx.recordingLog().commitLeadershipTermPosition(leadershipTermId, termPosition);
state(ConsensusModule.State.CLOSED);
ctx.terminationHook().run();
break;
case ABORT:
ctx.recordingLog().commitLeadershipTermPosition(leadershipTermId, termPosition);
state(ConsensusModule.State.CLOSED);
ctx.terminationHook().run();
break;
}
}
else if (serviceAckCount > ctx.serviceCount())
{
throw new IllegalStateException("Service count exceeded: " + serviceAckCount);
}
}
public void onSessionConnect(
final long correlationId,
final int responseStreamId,
final String responseChannel,
final byte[] credentialData)
{
final long nowMs = cachedEpochClock.time();
final long sessionId = nextSessionId++;
final ClusterSession session = new ClusterSession(sessionId, responseStreamId, responseChannel);
session.connect(aeron);
session.lastActivity(nowMs, correlationId);
if (pendingSessions.size() + sessionByIdMap.size() < ctx.maxConcurrentSessions())
{
authenticator.onConnectRequest(sessionId, credentialData, nowMs);
pendingSessions.add(session);
}
else
{
rejectedSessions.add(session);
}
}
public void onSessionClose(final long clusterSessionId)
{
closeSession(clusterSessionId, CloseReason.USER_ACTION);
}
public ControlledFragmentAssembler.Action onSessionMessage(
final DirectBuffer buffer,
final int offset,
final int length,
final long clusterSessionId,
final long correlationId)
{
final long nowMs = cachedEpochClock.time();
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null == session || session.state() == CLOSED)
{
return ControlledFragmentHandler.Action.CONTINUE;
}
if (session.state() == OPEN && logPublisher.appendMessage(buffer, offset, length, nowMs))
{
session.lastActivity(nowMs, correlationId);
return ControlledFragmentHandler.Action.CONTINUE;
}
return ControlledFragmentHandler.Action.ABORT;
}
public void onSessionKeepAlive(final long clusterSessionId)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.timeOfLastActivityMs(cachedEpochClock.time());
}
}
public void onChallengeResponse(final long correlationId, final long clusterSessionId, final byte[] credentialData)
{
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.id() == clusterSessionId && session.state() == CHALLENGED)
{
final long nowMs = cachedEpochClock.time();
session.lastActivity(nowMs, correlationId);
authenticator.onChallengeResponse(clusterSessionId, credentialData, nowMs);
break;
}
}
}
public void onAdminQuery(final long correlationId, final long clusterSessionId, final AdminQueryType queryType)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session && session.state() == OPEN)
{
switch (queryType)
{
case ENDPOINTS:
final ChannelUri archiveChannelUri = ChannelUri.parse(ctx.archiveContext().controlRequestChannel());
final String endpointsDetail =
"id=" + Long.toString(thisMember.id()) +
",memberStatus=" + thisMember.memberFacingEndpoint() +
",log=" + thisMember.memberFacingEndpoint() +
",archive=" + archiveChannelUri.get(ENDPOINT_PARAM_NAME);
final long nowMs = cachedEpochClock.time();
session.lastActivity(nowMs, correlationId);
session.adminQueryResponseDetail(endpointsDetail);
if (egressPublisher.sendEvent(session, EventCode.OK, session.adminQueryResponseDetail()))
{
session.adminQueryResponseDetail(null);
}
break;
case RECORDING_LOG: // TODO: or should this really be recoveryPlan?
// TODO: send recordingLog as a byte[]
break;
}
}
}
public boolean onTimerEvent(final long correlationId, final long nowMs)
{
return logPublisher.appendTimerEvent(correlationId, nowMs);
}
public void onScheduleTimer(final long correlationId, final long deadlineMs)
{
timerService.scheduleTimer(correlationId, deadlineMs);
}
public void onCancelTimer(final long correlationId)
{
timerService.cancelTimer(correlationId);
}
public void onServiceCloseSession(final long clusterSessionId)
{
closeSession(clusterSessionId, CloseReason.SERVICE_ACTION);
}
void state(final ConsensusModule.State state)
{
this.state = state;
moduleState.set(state.code());
}
void role(final Cluster.Role role)
{
this.role = role;
clusterRoleCounter.setOrdered(role.code());
}
void logRecordingPositionCounter(final ReadableCounter logRecordingPosition)
{
this.logRecordingPosition = logRecordingPosition;
}
void commitPositionCounter(final Counter commitPosition)
{
this.commitPosition = commitPosition;
}
@SuppressWarnings("unused")
void onReplaySessionMessage(
final long correlationId,
final long clusterSessionId,
final long timestamp,
final DirectBuffer buffer,
final int offset,
final int length,
final Header header)
{
cachedEpochClock.update(timestamp);
sessionByIdMap.get(clusterSessionId).lastActivity(timestamp, correlationId);
}
void onReplayTimerEvent(final long correlationId, final long timestamp)
{
cachedEpochClock.update(timestamp);
if (!timerService.cancelTimer(correlationId))
{
failedTimerCancellations.addLong(correlationId);
}
}
void onReplaySessionOpen(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final int responseStreamId,
final String responseChannel)
{
cachedEpochClock.update(timestamp);
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
void onLoadSession(
final long termPosition,
final long correlationId,
final long clusterSessionId,
final long timestamp,
final CloseReason closeReason,
final int responseStreamId,
final String responseChannel)
{
final ClusterSession session = new ClusterSession(clusterSessionId, responseStreamId, responseChannel);
session.closeReason(closeReason);
session.open(termPosition);
session.lastActivity(timestamp, correlationId);
if (CloseReason.NULL_VAL != closeReason)
{
session.close();
}
sessionByIdMap.put(clusterSessionId, session);
if (clusterSessionId >= nextSessionId)
{
nextSessionId = clusterSessionId + 1;
}
}
@SuppressWarnings("unused")
void onReplaySessionClose(
final long correlationId,
final long clusterSessionId,
final long timestamp,
final CloseReason closeReason)
{
cachedEpochClock.update(timestamp);
sessionByIdMap.remove(clusterSessionId).close();
}
@SuppressWarnings("unused")
void onReplayClusterAction(
final long logPosition, final long leadershipTermId, final long timestamp, final ClusterAction action)
{
cachedEpochClock.update(timestamp);
final long termPosition = logPosition - baseLogPosition;
switch (action)
{
case SUSPEND:
state(ConsensusModule.State.SUSPENDED);
break;
case RESUME:
state(ConsensusModule.State.ACTIVE);
break;
case SNAPSHOT:
if (isRecovered)
{
serviceAckCount = 0;
state(ConsensusModule.State.SNAPSHOT);
takeSnapshot(timestamp, termPosition);
}
break;
case SHUTDOWN:
if (isRecovered)
{
serviceAckCount = 0;
state(ConsensusModule.State.SHUTDOWN);
takeSnapshot(timestamp, termPosition);
}
break;
case ABORT:
if (isRecovered)
{
serviceAckCount = 0;
state(ConsensusModule.State.ABORT);
}
break;
}
}
void onReloadState(final long nextSessionId)
{
this.nextSessionId = nextSessionId;
}
void onRequestVote(
final long candidateTermId,
final long lastBaseLogPosition,
final long lastTermPosition,
final int candidateId)
{
if (Cluster.Role.FOLLOWER == role &&
candidateTermId == leadershipTermId &&
lastBaseLogPosition == recoveryPlan.lastLogPosition)
{
final boolean vote = lastTermPosition >= recoveryPlan.lastTermPositionAppended;
sendVote(candidateTermId, lastBaseLogPosition, lastTermPosition, candidateId, vote);
if (!vote)
{
// TODO: become candidate in new election
throw new IllegalStateException("Invalid member for cluster leader: " + candidateId);
}
else
{
votedForMemberId = candidateId;
if (recoveryPlan.lastTermPositionAppended < lastTermPosition)
{
// TODO: need to catch up with leader
}
}
}
else
{
sendVote(candidateTermId, lastBaseLogPosition, lastTermPosition, candidateId, false);
}
}
void onVote(
final long candidateTermId,
final long lastBaseLogPosition,
final long lastTermPosition,
final int candidateMemberId,
final int followerMemberId,
final boolean vote)
{
if (Cluster.Role.FOLLOWER == role &&
candidateTermId == leadershipTermId &&
lastBaseLogPosition == recoveryPlan.lastLogPosition &&
lastTermPosition == recoveryPlan.lastTermPositionAppended &&
candidateMemberId == memberId)
{
if (vote)
{
clusterMembers[followerMemberId].votedForId(candidateMemberId);
}
else
{
throw new IllegalStateException("Invalid member for cluster leader: " + candidateMemberId);
}
}
}
void onAppendedPosition(final long termPosition, final long leadershipTermId, final int followerMemberId)
{
if (leadershipTermId == this.leadershipTermId)
{
clusterMembers[followerMemberId].termPosition(termPosition);
}
}
void onCommitPosition(
final long termPosition, final long leadershipTermId, final int leaderMemberId, final int logSessionId)
{
if (leadershipTermId == this.leadershipTermId)
{
if (leaderMemberId != this.leaderMemberId)
{
throw new IllegalStateException("Commit position not for current leader: expected=" +
this.leaderMemberId + " received=" + leaderMemberId);
}
if (0 == termPosition && leaderMemberId == votedForMemberId && this.logSessionId != logSessionId)
{
this.logSessionId = logSessionId;
}
timeOfLastLogUpdateMs = cachedEpochClock.time();
followerCommitPosition = termPosition;
}
}
private void closeSession(final long clusterSessionId, final CloseReason closeReason)
{
final ClusterSession session = sessionByIdMap.get(clusterSessionId);
if (null != session)
{
session.closeReason(closeReason);
session.close();
if (appendClosedSession(session, cachedEpochClock.time()))
{
sessionByIdMap.remove(clusterSessionId);
}
}
}
private int slowTickCycle(final long nowMs)
{
int workCount = 0;
markFile.updateActivityTimestamp(nowMs);
workCount += invokeAeronClient();
workCount += serviceControlAdapter.poll();
if (Cluster.Role.LEADER == role)
{
workCount += checkControlToggle(nowMs);
if (ConsensusModule.State.ACTIVE == state)
{
workCount += processPendingSessions(pendingSessions, nowMs);
workCount += checkSessions(sessionByIdMap, nowMs);
workCount += processRejectedSessions(rejectedSessions, nowMs);
workCount += timerService.poll(nowMs);
}
}
if (null != archive)
{
archive.checkForErrorResponse();
}
return workCount;
}
private int checkControlToggle(final long nowMs)
{
switch (ClusterControl.ToggleState.get(controlToggle))
{
case SUSPEND:
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SUSPEND, nowMs))
{
state(ConsensusModule.State.SUSPENDED);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case RESUME:
if (ConsensusModule.State.SUSPENDED == state && appendAction(ClusterAction.RESUME, nowMs))
{
state(ConsensusModule.State.ACTIVE);
ClusterControl.ToggleState.reset(controlToggle);
}
break;
case SNAPSHOT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SNAPSHOT, nowMs))
{
state(ConsensusModule.State.SNAPSHOT);
takeSnapshot(nowMs, logPublisher.position());
}
break;
case SHUTDOWN:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.SHUTDOWN, nowMs))
{
state(ConsensusModule.State.SHUTDOWN);
takeSnapshot(nowMs, logPublisher.position());
}
break;
case ABORT:
serviceAckCount = 0;
if (ConsensusModule.State.ACTIVE == state && appendAction(ClusterAction.ABORT, nowMs))
{
state(ConsensusModule.State.ABORT);
}
break;
default:
return 0;
}
return 1;
}
private void sendVote(
final long candidateTermId,
final long lastBaseLogPosition,
final long lastTermPosition,
final int candidateId,
final boolean vote)
{
idleStrategy.reset();
while (!memberStatusPublisher.vote(
clusterMembers[candidateId].publication(),
candidateTermId,
lastBaseLogPosition,
lastTermPosition,
candidateId,
memberId,
vote))
{
idle();
}
}
private boolean appendAction(final ClusterAction action, final long nowMs)
{
final long position = baseLogPosition +
logPublisher.position() +
MessageHeaderEncoder.ENCODED_LENGTH +
ClusterActionRequestEncoder.BLOCK_LENGTH;
return logPublisher.appendClusterAction(action, leadershipTermId, position, nowMs);
}
private int processPendingSessions(final ArrayList<ClusterSession> pendingSessions, final long nowMs)
{
int workCount = 0;
for (int lastIndex = pendingSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = pendingSessions.get(i);
if (session.state() == INIT || session.state() == CONNECTED)
{
if (session.isResponsePublicationConnected())
{
session.state(CONNECTED);
authenticator.onProcessConnectedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == CHALLENGED)
{
if (session.isResponsePublicationConnected())
{
authenticator.onProcessChallengedSession(sessionProxy.session(session), nowMs);
}
}
if (session.state() == AUTHENTICATED)
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.timeOfLastActivityMs(nowMs);
sessionByIdMap.put(session.id(), session);
appendConnectedSession(session, nowMs);
workCount += 1;
}
else if (session.state() == REJECTED)
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
rejectedSessions.add(session);
}
else if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
ArrayListUtil.fastUnorderedRemove(pendingSessions, i, lastIndex--);
session.close();
}
}
return workCount;
}
private int processRejectedSessions(final ArrayList<ClusterSession> rejectedSessions, final long nowMs)
{
int workCount = 0;
for (int lastIndex = rejectedSessions.size() - 1, i = lastIndex; i >= 0; i--)
{
final ClusterSession session = rejectedSessions.get(i);
String detail = ConsensusModule.Configuration.SESSION_LIMIT_MSG;
EventCode eventCode = EventCode.ERROR;
if (session.state() == REJECTED)
{
detail = ConsensusModule.Configuration.SESSION_REJECTED_MSG;
eventCode = EventCode.AUTHENTICATION_REJECTED;
}
if (egressPublisher.sendEvent(session, eventCode, detail) ||
nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
ArrayListUtil.fastUnorderedRemove(rejectedSessions, i, lastIndex--);
session.close();
workCount++;
}
}
return workCount;
}
private int checkSessions(final Long2ObjectHashMap<ClusterSession> sessionByIdMap, final long nowMs)
{
int workCount = 0;
for (final Iterator<ClusterSession> i = sessionByIdMap.values().iterator(); i.hasNext(); )
{
final ClusterSession session = i.next();
final ClusterSession.State state = session.state();
if (nowMs > (session.timeOfLastActivityMs() + sessionTimeoutMs))
{
switch (state)
{
case OPEN:
egressPublisher.sendEvent(session, EventCode.ERROR, SESSION_TIMEOUT_MSG);
session.closeReason(CloseReason.TIMEOUT);
session.close();
if (appendClosedSession(session, nowMs))
{
i.remove();
}
break;
case CLOSED:
if (appendClosedSession(session, nowMs))
{
session.close();
i.remove();
}
break;
default:
session.close();
i.remove();
}
workCount += 1;
}
else if (state == CONNECTED)
{
appendConnectedSession(session, nowMs);
workCount += 1;
}
else if (state == OPEN && session.adminQueryResponseDetail() != null)
{
if (egressPublisher.sendEvent(session, EventCode.OK, session.adminQueryResponseDetail()))
{
session.adminQueryResponseDetail(null);
}
}
}
return workCount;
}
private void appendConnectedSession(final ClusterSession session, final long nowMs)
{
final long resultingPosition = logPublisher.appendConnectedSession(session, nowMs);
if (resultingPosition > 0)
{
session.open(resultingPosition);
}
}
private boolean appendClosedSession(final ClusterSession session, final long nowMs)
{
if (logPublisher.appendClosedSession(session, nowMs))
{
session.close();
return true;
}
return false;
}
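// Appointed-leader election: the appointed member becomes candidate, requests votes from every member and awaits them; other members poll member status messages until the leader is established.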
private void electLeader()
{
awaitConnectedMembers();
if (ctx.appointedLeaderId() == memberId)
{
role(Cluster.Role.CANDIDATE);
ClusterMember.becomeCandidate(clusterMembers, memberId);
votedForMemberId = memberId;
for (final ClusterMember member : clusterMembers)
{
idleStrategy.reset();
while (!memberStatusPublisher.requestVote(
member.publication(),
leadershipTermId,
recoveryPlan.lastLogPosition,
recoveryPlan.lastTermPositionAppended,
memberId))
{
idle();
}
}
do
{
idle(memberStatusAdapter.poll());
}
while (ClusterMember.awaitingVotes(clusterMembers));
leaderMemberId = memberId;
leaderMember = thisMember;
}
else
{
votedForMemberId = ClusterMember.NULL_MEMBER_ID;
do
{
idle(memberStatusAdapter.poll());
}
while (ClusterMember.NULL_MEMBER_ID == leaderMemberId);
}
}
private void awaitConnectedMembers()
{
idleStrategy.reset();
while (true)
{
if (ClusterMember.arePublicationsConnected(clusterMembers))
{
break;
}
idle();
}
}
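// Leader transition: add an exclusive log publication (with each member's log endpoint as a destination when none is configured), start recording it, create position counters, await services and followers, then reconnect open client sessions.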
private void becomeLeader()
{
updateMemberDetails(leaderMemberId);
role(Cluster.Role.LEADER);
final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
final Publication publication = aeron.addExclusivePublication(ctx.logChannel(), ctx.logStreamId());
if (!channelUri.containsKey(CommonContext.ENDPOINT_PARAM_NAME))
{
final ChannelUriStringBuilder builder = new ChannelUriStringBuilder().media("udp");
for (final ClusterMember member : clusterMembers)
{
if (member.id() != memberId)
{
publication.addDestination(builder.endpoint(member.logEndpoint()).build());
}
}
}
logAdapter = null;
logPublisher.connect(publication);
logSessionId = publication.sessionId();
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
final String recordingChannel = channelUri.toString();
archive.startRecording(recordingChannel, ctx.logStreamId(), SourceLocation.LOCAL);
createPositionCounters();
awaitServicesReady(channelUri, true);
awaitFollowersReady();
final long nowMs = epochClock.time();
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() != CLOSED)
{
session.connect(aeron);
session.timeOfLastActivityMs(nowMs);
}
}
}
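// Follower transition: wait for the leader's first commit position, subscribe to and record the replicated log, then signal services to join.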
private void becomeFollower()
{
leaderMember = clusterMembers[leaderMemberId];
followerCommitPosition = NULL_POSITION;
updateMemberDetails(leaderMemberId);
role(Cluster.Role.FOLLOWER);
while (NULL_POSITION == followerCommitPosition)
{
final int fragments = memberStatusAdapter.poll();
idle(fragments);
}
final ChannelUri channelUri = ChannelUri.parse(ctx.logChannel());
channelUri.put(CommonContext.ENDPOINT_PARAM_NAME, thisMember.logEndpoint());
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(logSessionId));
final String logChannel = channelUri.toString();
final int streamId = ctx.logStreamId();
archive.startRecording(logChannel, streamId, SourceLocation.REMOTE);
logAdapter = new LogAdapter(awaitImage(logSessionId, aeron.addSubscription(logChannel, streamId)), this);
createPositionCounters();
awaitServicesReady(channelUri, false);
}
private void awaitFollowersReady()
{
ClusterMember.resetTermPositions(clusterMembers, -1);
clusterMembers[memberId].termPosition(logRecordingPosition.get());
do
{
final long nowMs = epochClock.time();
if (nowMs > (timeOfLastLogUpdateMs + heartbeatIntervalMs))
{
timeOfLastLogUpdateMs = nowMs;
for (final ClusterMember member : clusterMembers)
{
if (member != thisMember)
{
memberStatusPublisher.commitPosition(
member.publication(), baseLogPosition, leadershipTermId, memberId, logSessionId);
}
}
}
idle(memberStatusAdapter.poll());
}
while (!ClusterMember.hasReachedPosition(clusterMembers, 0));
}
private void createPositionCounters()
{
final CountersReader counters = aeron.countersReader();
final int recordingCounterId = awaitRecordingCounter(counters, logSessionId);
logRecordingPosition = new ReadableCounter(counters, recordingCounterId);
logRecordingId = RecordingPos.getRecordingId(counters, logRecordingPosition.counterId());
commitPosition = CommitPos.allocate(
aeron, tempBuffer, logRecordingId, baseLogPosition, leadershipTermId, logSessionId);
}
private void awaitServicesReady(final ChannelUri channelUri, final boolean isLeader)
{
serviceAckCount = 0;
final String channel = isLeader ? channelUri.prefix(SPY_QUALIFIER).toString() : channelUri.toString();
serviceControlPublisher.joinLog(
leadershipTermId, commitPosition.id(), logSessionId, ctx.logStreamId(), channel);
awaitServiceAcks();
}
private void updateMemberDetails(final int leaderMemberId)
{
for (final ClusterMember clusterMember : clusterMembers)
{
clusterMember.isLeader(clusterMember.id() == leaderMemberId);
}
updateClusterMemberDetails(clusterMembers);
}
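// Restores consensus module state by replaying the snapshot recording from the archive through a SnapshotLoader.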
private void recoverFromSnapshot(final RecordingLog.ReplayStep snapshotStep, final AeronArchive archive)
{
final RecordingLog.Entry snapshot = snapshotStep.entry;
cachedEpochClock.update(snapshot.timestamp);
baseLogPosition = snapshot.logPosition;
leadershipTermId = snapshot.leadershipTermId;
final long recordingId = snapshot.recordingId;
final RecordingExtent recordingExtent = new RecordingExtent();
if (0 == archive.listRecording(recordingId, recordingExtent))
{
throw new IllegalStateException("Could not find recordingId: " + recordingId);
}
final String channel = ctx.replayChannel();
final int streamId = ctx.replayStreamId();
final long length = recordingExtent.stopPosition - recordingExtent.startPosition;
final int sessionId = (int)archive.startReplay(recordingId, 0, length, channel, streamId);
final String replaySubscriptionChannel = ChannelUri.addSessionId(channel, sessionId);
try (Subscription subscription = aeron.addSubscription(replaySubscriptionChannel, streamId))
{
final Image image = awaitImage(sessionId, subscription);
final SnapshotLoader snapshotLoader = new SnapshotLoader(image, this);
while (true)
{
final int fragments = snapshotLoader.poll();
if (fragments == 0)
{
if (snapshotLoader.isDone())
{
break;
}
if (image.isClosed())
{
throw new IllegalStateException("Snapshot ended unexpectedly");
}
}
idle(fragments);
}
}
}
private Image awaitImage(final int sessionId, final Subscription subscription)
{
idleStrategy.reset();
Image image;
while ((image = subscription.imageBySessionId(sessionId)) == null)
{
idle();
}
return image;
}
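// Replays each recorded leadership term in sequence, having the services join per term and advancing the base log position as each term completes.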
private void recoverFromLog(final List<RecordingLog.ReplayStep> steps, final AeronArchive archive)
{
final int streamId = ctx.replayStreamId();
final ChannelUri channelUri = ChannelUri.parse(ctx.replayChannel());
for (int i = 0, size = steps.size(); i < size; i++)
{
final RecordingLog.ReplayStep step = steps.get(i);
final RecordingLog.Entry entry = step.entry;
final long recordingId = entry.recordingId;
final long startPosition = step.recordingStartPosition;
final long stopPosition = step.recordingStopPosition;
final long length = NULL_POSITION == stopPosition ? Long.MAX_VALUE : stopPosition - startPosition;
final long logPosition = entry.logPosition;
if (logPosition != baseLogPosition)
{
throw new IllegalStateException("base position for log not as expected: expected " +
baseLogPosition + " actual is " + logPosition + ", " + step);
}
leadershipTermId = entry.leadershipTermId;
channelUri.put(CommonContext.SESSION_ID_PARAM_NAME, Integer.toString(i));
final String channel = channelUri.toString();
try (Counter counter = CommitPos.allocate(
aeron, tempBuffer, recordingId, logPosition, leadershipTermId, i);
Subscription subscription = aeron.addSubscription(channel, streamId))
{
counter.setOrdered(stopPosition);
serviceAckCount = 0;
serviceControlPublisher.joinLog(leadershipTermId, counter.id(), i, streamId, channel);
awaitServiceAcks();
final int sessionId = (int)archive.startReplay(recordingId, startPosition, length, channel, streamId);
if (i != sessionId)
{
throw new IllegalStateException("Session id not for iteration: " + sessionId);
}
final Image image = awaitImage(sessionId, subscription);
serviceAckCount = 0;
replayTerm(image, stopPosition);
awaitServiceAcks();
final long termPosition = image.position();
if (step.entry.termPosition < termPosition)
{
ctx.recordingLog().commitLeadershipTermPosition(leadershipTermId, termPosition);
}
baseLogPosition += termPosition;
failedTimerCancellations.forEachOrderedLong(timerService::cancelTimer);
failedTimerCancellations.clear();
}
}
failedTimerCancellations.trimToSize();
}
private Counter addRecoveryStateCounter(final RecordingLog.RecoveryPlan plan)
{
final int termCount = plan.termSteps.size();
final RecordingLog.ReplayStep snapshotStep = plan.snapshotStep;
if (null != snapshotStep)
{
final RecordingLog.Entry snapshot = snapshotStep.entry;
return RecoveryState.allocate(
aeron, tempBuffer, snapshot.leadershipTermId, snapshot.termPosition, snapshot.timestamp, termCount);
}
return RecoveryState.allocate(aeron, tempBuffer, leadershipTermId, NULL_POSITION, 0, termCount);
}
private void awaitServiceAcks()
{
while (true)
{
final int fragmentsRead = serviceControlAdapter.poll();
if (serviceAckCount >= ctx.serviceCount())
{
break;
}
idle(fragmentsRead);
}
}
private void validateServiceAck(
final long logPosition, final long leadershipTermId, final int serviceId, final ClusterAction action)
{
final long currentLogPosition = baseLogPosition + currentTermPosition();
if (logPosition != currentLogPosition || leadershipTermId != this.leadershipTermId)
{
throw new IllegalStateException("Invalid log state:" +
" serviceId=" + serviceId +
", logPosition=" + logPosition + " current is " + currentLogPosition +
", leadershipTermId=" + leadershipTermId + " current is " + this.leadershipTermId);
}
if (!state.isValid(action))
{
throw new IllegalStateException("Invalid action ack for state " + state + " action " + action);
}
}
private long currentTermPosition()
{
return null != logAdapter ? logAdapter.position() : logPublisher.position();
}
private void updateClusterMemberDetails(final ClusterMember[] members)
{
int leaderIndex = 0;
for (int i = 0, length = members.length; i < length; i++)
{
if (members[i].isLeader())
{
leaderIndex = i;
break;
}
}
final StringBuilder builder = new StringBuilder(100);
builder.append(members[leaderIndex].clientFacingEndpoint());
for (int i = 0, length = members.length; i < length; i++)
{
if (i != leaderIndex)
{
builder.append(',').append(members[i].clientFacingEndpoint());
}
}
sessionProxy.memberEndpointsDetail(builder.toString());
}
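// Leader: publish the quorum commit position derived from member appended positions; follower: report the local recording position and enforce the leader heartbeat timeout.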
private int updateMemberPosition(final long nowMs)
{
int workCount = 0;
switch (role)
{
case LEADER:
{
thisMember.termPosition(logRecordingPosition.get());
final long position = ClusterMember.quorumPosition(clusterMembers, rankedPositions);
if (position > commitPosition.getWeak() || nowMs >= (timeOfLastLogUpdateMs + heartbeatIntervalMs))
{
for (final ClusterMember member : clusterMembers)
{
if (member != thisMember)
{
memberStatusPublisher.commitPosition(
member.publication(), position, leadershipTermId, memberId, logSessionId);
}
}
commitPosition.setOrdered(position);
timeOfLastLogUpdateMs = nowMs;
workCount = 1;
}
break;
}
case FOLLOWER:
{
final long recordingPosition = logRecordingPosition.get();
if (recordingPosition != lastRecordingPosition)
{
final Publication publication = leaderMember.publication();
if (memberStatusPublisher.appendedPosition(
publication, recordingPosition, leadershipTermId, memberId))
{
lastRecordingPosition = recordingPosition;
}
workCount = 1;
}
commitPosition.proposeMaxOrdered(logAdapter.position());
if (nowMs >= (timeOfLastLogUpdateMs + heartbeatTimeoutMs))
{
throw new AgentTerminationException("No heartbeat detected from cluster leader");
}
break;
}
}
return workCount;
}
private void idle()
{
checkInterruptedStatus();
invokeAeronClient();
idleStrategy.idle();
}
private void idle(final int workCount)
{
checkInterruptedStatus();
invokeAeronClient();
idleStrategy.idle(workCount);
}
private static void checkInterruptedStatus()
{
if (Thread.currentThread().isInterrupted())
{
throw new RuntimeException("Unexpected interrupt");
}
}
private int invokeAeronClient()
{
int workCount = 0;
if (null != aeronClientInvoker)
{
workCount += aeronClientInvoker.invoke();
}
return workCount;
}
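// Records a snapshot of consensus module state to the archive and appends the resulting entry to the recording log.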
private void takeSnapshot(final long timestampMs, final long termPosition)
{
final long recordingId;
final long logPosition = baseLogPosition + termPosition;
final String channel = ctx.snapshotChannel();
final int streamId = ctx.snapshotStreamId();
try (Publication publication = archive.addRecordedExclusivePublication(channel, streamId))
{
try
{
final CountersReader counters = aeron.countersReader();
final int counterId = awaitRecordingCounter(counters, publication.sessionId());
recordingId = RecordingPos.getRecordingId(counters, counterId);
snapshotState(publication, logPosition, leadershipTermId);
awaitRecordingComplete(recordingId, publication.position(), counters, counterId);
}
finally
{
archive.stopRecording(publication);
}
}
ctx.recordingLog().appendSnapshot(recordingId, leadershipTermId, baseLogPosition, termPosition, timestampMs);
}
private void awaitRecordingComplete(
final long recordingId, final long completePosition, final CountersReader counters, final int counterId)
{
idleStrategy.reset();
do
{
idle();
if (!RecordingPos.isActive(counters, counterId, recordingId))
{
throw new IllegalStateException("Recording has stopped unexpectedly: " + recordingId);
}
}
while (counters.getCounterValue(counterId) < completePosition);
}
private int awaitRecordingCounter(final CountersReader counters, final int sessionId)
{
idleStrategy.reset();
int counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
while (CountersReader.NULL_COUNTER_ID == counterId)
{
idle();
counterId = RecordingPos.findCounterIdBySession(counters, sessionId);
}
return counterId;
}
private void snapshotState(final Publication publication, final long logPosition, final long leadershipTermId)
{
final ConsensusModuleSnapshotTaker snapshotTaker = new ConsensusModuleSnapshotTaker(
publication, idleStrategy, aeronClientInvoker);
snapshotTaker.markBegin(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
for (final ClusterSession session : sessionByIdMap.values())
{
if (session.state() == OPEN)
{
snapshotTaker.snapshotSession(session);
}
}
invokeAeronClient();
timerService.snapshot(snapshotTaker);
snapshotTaker.sequencerState(nextSessionId);
snapshotTaker.markEnd(SNAPSHOT_TYPE_ID, logPosition, leadershipTermId, 0);
}
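// Polls the replayed log image up to the final term position, applying fragments to this agent until the image ends.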
private void replayTerm(final Image image, final long finalTermPosition)
{
logAdapter = new LogAdapter(image, this);
while (true)
{
final int fragments = logAdapter.poll(finalTermPosition);
if (fragments == 0)
{
if (image.isClosed())
{
if (!image.isEndOfStream())
{
throw new IllegalStateException("Unexpected close");
}
break;
}
}
idle(fragments);
}
}
}
|
[Java] Only process service request to close a session if in the leader role.
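A minimal sketch of the guard this message describes, assuming the check lands in SequencerAgent.onServiceCloseSession (the pre-change method above forwards the request unconditionally; the exact post-change diff is not reproduced here):

public void onServiceCloseSession(final long clusterSessionId)
{
    // Hypothetical guard: only the leader appends the session close to the log;
    // followers ignore the direct service request and observe the close via the replicated log.
    if (Cluster.Role.LEADER == role)
    {
        closeSession(clusterSessionId, CloseReason.SERVICE_ACTION);
    }
}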
|
aeron-cluster/src/main/java/io/aeron/cluster/SequencerAgent.java
|
[Java] Only process service request to close a session if in the leader role.
|
|
Java
|
apache-2.0
|
05c2ce578c567e739e5e9d967370369cbd90878b
| 0
|
h2oai/h2o,vbelakov/h2o,h2oai/h2o-2,calvingit21/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o,h2oai/h2o,h2oai/h2o,vbelakov/h2o,calvingit21/h2o-2,elkingtonmcb/h2o-2,rowhit/h2o-2,111t8e/h2o-2,vbelakov/h2o,111t8e/h2o-2,h2oai/h2o-2,111t8e/h2o-2,eg-zhang/h2o-2,vbelakov/h2o,eg-zhang/h2o-2,rowhit/h2o-2,100star/h2o,elkingtonmcb/h2o-2,111t8e/h2o-2,h2oai/h2o,calvingit21/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,eg-zhang/h2o-2,h2oai/h2o-2,h2oai/h2o-2,rowhit/h2o-2,calvingit21/h2o-2,111t8e/h2o-2,h2oai/h2o-2,eg-zhang/h2o-2,h2oai/h2o,calvingit21/h2o-2,111t8e/h2o-2,100star/h2o,h2oai/h2o,h2oai/h2o,h2oai/h2o-2,eg-zhang/h2o-2,100star/h2o,elkingtonmcb/h2o-2,111t8e/h2o-2,rowhit/h2o-2,rowhit/h2o-2,vbelakov/h2o,vbelakov/h2o,100star/h2o,eg-zhang/h2o-2,rowhit/h2o-2,rowhit/h2o-2,eg-zhang/h2o-2,vbelakov/h2o,111t8e/h2o-2,vbelakov/h2o,100star/h2o,vbelakov/h2o,eg-zhang/h2o-2,h2oai/h2o,rowhit/h2o-2,100star/h2o,100star/h2o,eg-zhang/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,100star/h2o,rowhit/h2o-2,h2oai/h2o,elkingtonmcb/h2o-2,calvingit21/h2o-2,rowhit/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,eg-zhang/h2o-2,elkingtonmcb/h2o-2,vbelakov/h2o,calvingit21/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,calvingit21/h2o-2,100star/h2o
|
package hex.glm;
import hex.FrameTask.DataInfo;
import hex.GridSearch.GridSearchProgress;
import hex.glm.GLMModel.GLMValidationTask;
import hex.glm.GLMModel.GLMXValidationTask;
import hex.glm.GLMParams.CaseMode;
import hex.glm.GLMParams.Family;
import hex.glm.GLMParams.Link;
import hex.glm.GLMTask.GLMIterationTask;
import hex.glm.GLMTask.LMAXTask;
import hex.glm.GLMTask.YMUTask;
import hex.glm.LSMSolver.ADMMSolver;
import java.text.DecimalFormat;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import jsr166y.CountedCompleter;
import water.*;
import water.H2O.H2OCallback;
import water.H2O.H2OCountedCompleter;
import water.H2O.JobCompleter;
import water.Job.ModelJob;
import water.api.DocGen;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.*;
public class GLM2 extends ModelJob {
public final String _jobName;
// private transient GLM2 [] _subjobs;
// private Key _parentjob;
@API(help = "max-iterations", filter = Default.class, lmin=1, lmax=1000000)
int max_iter = 50;
@API(help = "If true, data will be standardized on the fly when computing the model.", filter = Default.class)
boolean standardize = true;
@API(help = "validation folds", filter = Default.class, lmin=0, lmax=100)
int n_folds;
@API(help = "Family.", filter = Default.class)
Family family = Family.gaussian;
private DataInfo _dinfo;
private GLMParams _glm;
private double [] _wgiven;
private double _proximalPenalty;
private double [] _beta;
private boolean _runAllLambdas = true;
// @API(help = "Link.", filter = Default.class)
Link link = Link.identity;
@API(help = "CaseMode", filter = Default.class)
CaseMode case_mode = CaseMode.none;
@API(help = "CaseMode", filter = Default.class)
double case_val = 0;
@API(help = "Tweedie variance power", filter = Default.class)
double tweedie_variance_power;
double tweedie_link_power;
@API(help = "alpha", filter = Default.class)
double [] alpha = new double[]{0.5};
// @API(help = "lambda", filter = RSeq2.class)
@API(help = "lambda", filter = Default.class)
double [] lambda;// = new double[]{1e-5};
public static final double DEFAULT_BETA_EPS = 1e-4;
@API(help = "beta_eps", filter = Default.class)
double beta_epsilon = DEFAULT_BETA_EPS;
int _lambdaIdx = 0;
@Override public Key defaultDestKey(){
return null;
}
@Override public Key defaultJobKey() {return null;}
public GLM2() {_jobName = "";}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda){
this(desc,jobKey,dest,dinfo,glm,lambda,0.5,0);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,0);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,DEFAULT_BETA_EPS);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha,int nfolds, double betaEpsilon){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,betaEpsilon,null);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds, double betaEpsilon, Key parentJob){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,betaEpsilon,parentJob, null,0);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds, double betaEpsilon, Key parentJob, double [] beta, double proximalPenalty) {
assert beta == null || beta.length == (dinfo.fullN()+1):"unexpected size of beta, got length " + beta.length + ", expected " + (dinfo.fullN()+1);
job_key = jobKey;
description = desc;
destination_key = dest;
this.beta_epsilon = betaEpsilon;
_beta = beta;
_dinfo = dinfo;
_glm = glm;
this.lambda = lambda;
if((_proximalPenalty = proximalPenalty) != 0)
_wgiven = beta;
this.alpha= new double[]{alpha};
this.n_folds = nfolds;
source = dinfo._adaptedFrame;
response = dinfo._adaptedFrame.lastVec();
_jobName = dest.toString() + ((nfolds > 1)?("[" + dinfo._foldId + "]"):"");
}
static String arrayToString (double[] arr) {
if (arr == null) {
return "(null)";
}
StringBuffer sb = new StringBuffer();
for (int i = 0; i < arr.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append(arr[i]);
}
return sb.toString();
}
@Override protected void logStart() {
Log.info("Starting GLM2 model build...");
super.logStart();
Log.info(" max_iter: ", max_iter);
Log.info(" standardize: ", standardize);
Log.info(" n_folds: ", n_folds);
Log.info(" family: ", family);
Log.info(" wgiven: " + arrayToString(_wgiven));
Log.info(" proximalPenalty: " + _proximalPenalty);
Log.info(" runAllLambdas: " + _runAllLambdas);
Log.info(" link: " + link);
Log.info(" case_mode: " + case_mode);
Log.info(" case_val: " + case_val);
Log.info(" tweedie_variance_power: " + tweedie_variance_power);
Log.info(" tweedie_link_power: " + tweedie_link_power);
Log.info(" alpha: " + arrayToString(alpha));
Log.info(" lambda: " + arrayToString(lambda));
Log.info(" beta_epsilon: " + beta_epsilon);
Log.info(" description: " + description);
}
public GLM2 setCase(CaseMode cm, double cv){
case_mode = cm;
case_val = cv;
return this;
}
/** Return the query link to this page */
public static String link(Key k, String content) {
RString rs = new RString("<a href='GLM2.query?source=%$key'>%content</a>");
rs.replace("key", k.toString());
rs.replace("content", content);
return rs.toString();
}
public static Job gridSearch(Key jobKey, Key destinationKey, DataInfo dinfo, GLMParams glm, double [] lambda, double [] alpha, int nfolds){
return gridSearch(jobKey, destinationKey, dinfo, glm, lambda, alpha,nfolds,DEFAULT_BETA_EPS);
}
public static Job gridSearch(Key jobKey, Key destinationKey, DataInfo dinfo, GLMParams glm, double [] lambda, double [] alpha, int nfolds, double betaEpsilon){
return new GLMGridSearch(4, jobKey, destinationKey,dinfo,glm,lambda,alpha, nfolds,betaEpsilon).fork();
}
@Override protected Response serve() {
init();
link = family.defaultLink;// TODO
tweedie_link_power = 1 - tweedie_variance_power;// TODO
Frame fr = new Frame(source._names.clone(),source.vecs().clone());
fr.remove(ignored_cols);
final Vec [] vecs = fr.vecs();
ArrayList<Integer> constantOrNAs = new ArrayList<Integer>();
for(int i = 0; i < vecs.length-1; ++i)// put response to the end
if(vecs[i] == response){
fr.add(fr._names[i], fr.remove(i));
break;
}
for(int i = 0; i < vecs.length-1; ++i) // remove constant cols and cols with too many NAs
if(vecs[i].min() == vecs[i].max() || vecs[i].naCnt() > vecs[i].length()*0.2)constantOrNAs.add(i);
if(!constantOrNAs.isEmpty()){
int [] cols = new int[constantOrNAs.size()];
for(int i = 0; i < cols.length; ++i)cols[i] = constantOrNAs.get(i);
fr.remove(cols);
}
_dinfo = new DataInfo(fr, 1, standardize);
_glm = new GLMParams(family, tweedie_variance_power, link, tweedie_link_power);
if(alpha.length > 1) { // grid search
if(destination_key == null)destination_key = Key.make("GLMGridModel_"+Key.make());
if(job_key == null)job_key = Key.make("GLMGridJob_"+Key.make());
Job j = gridSearch(self(),destination_key, _dinfo, _glm, lambda, alpha,n_folds);
return GLMGridView.redirect(this,j.destination_key);
} else {
if(destination_key == null)destination_key = Key.make("GLMModel_"+Key.make());
if(job_key == null)job_key = Key.make("GLM2Job_"+Key.make());
fork();
// return GLMModelView.redirect(this, dest(),job_key);
return GLMProgress.redirect(this,job_key, dest());
}
}
private static double beta_diff(double[] b1, double[] b2) {
if(b1 == null)return Double.MAX_VALUE;
double res = Math.abs(b1[0] - b2[0]);
for( int i = 1; i < b1.length; ++i )
res = Math.max(res, Math.abs(b1[i] - b2[i]));
return res;
}
@Override public float progress(){
if(DKV.get(dest()) == null)return 0;
GLMModel m = DKV.get(dest()).get();
float progress = (float)m.iteration()/(float)max_iter; // TODO, do something smarter here
return progress;
}
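// Per-iteration callback: solves the penalized least-squares subproblem for the current lambda, de-normalizes coefficients when standardization is on, and on convergence validates the submodel and either advances to the next lambda or finishes the job; otherwise it schedules the next iteration.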
private class Iteration extends H2OCallback<GLMIterationTask> {
LSMSolver _solver;
final DataInfo _dinfo;
final H2OCountedCompleter _fjt;
// Key _modelKey;
GLMModel _model; // latest model
// GLMModel _oldModel; // model 1 round back (the one being validated)
public Iteration(GLMModel model, LSMSolver solver, DataInfo dinfo, H2OCountedCompleter fjt){
// _modelKey = modelKey;
// _oldModel = model;
_model = model;
_solver = solver;
_dinfo = dinfo;
_fjt = fjt;
}
@Override public Iteration clone(){return new Iteration(_model,_solver,_dinfo,_fjt);}
@Override public void callback(final GLMIterationTask glmt) {
if(isRunning(self())){
double [] newBeta = MemoryManager.malloc8d(glmt._xy.length);
double [] newBetaDeNorm = null;
_solver.solve(glmt._gram, glmt._xy, glmt._yy, newBeta);
final boolean diverged = Utils.hasNaNsOrInfs(newBeta);
if(diverged)
newBeta = glmt._beta == null?newBeta:glmt._beta;
if(_dinfo._standardize) {
newBetaDeNorm = newBeta.clone();
double norm = 0.0; // Reverse any normalization on the intercept
// denormalize only the numeric coefs (categoricals are not normalized)
final int numoff = newBeta.length - _dinfo._nums - 1;
for( int i=numoff; i< newBeta.length-1; i++ ) {
double b = newBetaDeNorm[i]*_dinfo._normMul[i-numoff];
norm += b*_dinfo._normSub[i-numoff]; // Also accumulate the intercept adjustment
newBetaDeNorm[i] = b;
}
newBetaDeNorm[newBetaDeNorm.length-1] -= norm;
}
boolean done = false;
// _model = _oldModel.clone();
done = done || _glm.family == Family.gaussian || (glmt._iter+1) == max_iter || beta_diff(glmt._beta, newBeta) < beta_epsilon || !isRunning(self());
_model.setLambdaSubmodel(_lambdaIdx,newBetaDeNorm == null?newBeta:newBetaDeNorm, newBetaDeNorm==null?null:newBeta, glmt._iter+1);
if(done){
H2OCallback fin = new H2OCallback<GLMValidationTask>() {
@Override public void callback(GLMValidationTask tsk) {
boolean improved = _model.setAndTestValidation(_lambdaIdx,tsk._res);
if(!diverged && (improved || _runAllLambdas) && _lambdaIdx < (lambda.length-1) ){ // continue with next lambda value?
_model.update(self());
_solver = new ADMMSolver(lambda[++_lambdaIdx],alpha[0]);
_solver._jobKey = self();
_solver._id = _jobName;
glmt._val = null;
Iteration.this.callback(glmt);
} else { // nope, we're done
_model.unlock(self());
_fjt.tryComplete(); // signal we're done to anyone waiting for the job
}
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter cc){
_fjt.completeExceptionally(ex);
return true;
}
};
if(GLM2.this.n_folds >= 2) xvalidate(_model, _lambdaIdx, fin);
else new GLMValidationTask(_model,_lambdaIdx,fin).dfork(_dinfo._adaptedFrame);
} else {
if(glmt._val != null){
glmt._val.finalize_AIC_AUC();
_model.setAndTestValidation(_lambdaIdx,glmt._val);//.store();
_model.update(self());
}
int iter = glmt._iter+1;
GLMIterationTask nextIter = new GLMIterationTask(GLM2.this, _dinfo,glmt._glm, case_mode, case_val, newBeta,iter,glmt._ymu,glmt._reg);
nextIter.setCompleter(new Iteration(_model, _solver, _dinfo, _fjt)); // we need to clone here as FJT will set status to done after this method
nextIter.dfork(_dinfo._adaptedFrame);
}
} else throw new JobCancelledException();
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){
_fjt.completeExceptionally(ex);
return false;
}
}
@Override
public GLM2 fork(){
start(new JobCompleter(this));
run();
return this;
}
// start inside of parent job
public void run(final H2OCountedCompleter fjt){
assert GLM2.this._fjtask == null;
GLM2.this._fjtask = fjt;
run();
}
public void run(){
logStart();
assert alpha.length == 1;
new YMUTask(this, _dinfo, case_mode, case_val, new H2OCallback<YMUTask>() {
@Override public void callback(final YMUTask ymut){
if(ymut._ymin == ymut._ymax){
String msg = case_mode == CaseMode.none
?"Attempting to run GLM on column with constant value = " + ymut._ymin
:"Attempting to run GLM on column with constant value, y " + case_mode + " " + case_val + " is " + (ymut._ymin == 0?"false":"true") + " for all rows!";
GLM2.this.cancel(msg);
GLM2.this._fjtask.completeExceptionally(new JobCancelledException(msg));
}
new LMAXTask(GLM2.this, _dinfo, _glm, ymut.ymu(),alpha[0],new H2OCallback<LMAXTask>(){
@Override public void callback(LMAXTask t){
final double lmax = t.lmax();
if(lambda == null){
lambda = new double[]{lmax,lmax*0.9,lmax*0.75,lmax*0.66,lmax*0.5,lmax*0.33,lmax*0.25,lmax*1e-1,lmax*1e-2,lmax*1e-3,lmax*1e-4,lmax*1e-5,lmax*1e-6,lmax*1e-7,lmax*1e-8}; // todo - make it a sequence of 100 lambdas
_runAllLambdas = false;
}
else {
int i = 0; while(i < lambda.length && lambda[i] > lmax)++i;
if(i > 0)lambda = i == lambda.length?new double[]{lmax}:Arrays.copyOfRange(lambda, i, lambda.length);
}
GLMIterationTask firstIter = new GLMIterationTask(GLM2.this,_dinfo,_glm,case_mode, case_val, _beta,0,ymut.ymu(),1.0/ymut.nobs());
final ADMMSolver solver = new ADMMSolver(lambda[0], alpha[0]);
solver._proximalPenalty = _proximalPenalty;
solver._wgiven = _wgiven;
solver._jobKey = self();
solver._id = _jobName;
GLMModel model = new GLMModel(self(),dest(),_dinfo, _glm,beta_epsilon,alpha[0],lambda,ymut.ymu(),GLM2.this.case_mode,GLM2.this.case_val);
model.delete_and_lock(self());
firstIter.setCompleter(new Iteration(model,solver,_dinfo,GLM2.this._fjtask));
firstIter.dfork(_dinfo._adaptedFrame);
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter cc){
if(GLM2.this._fjtask != null)GLM2.this._fjtask.completeExceptionally(ex);
return true;
}
}).dfork(_dinfo._adaptedFrame);
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter cc){
if(GLM2.this._fjtask != null)GLM2.this._fjtask.completeExceptionally(ex);
return true;
}
}).dfork(_dinfo._adaptedFrame);
}
private void xvalidate(final GLMModel model, int lambdaIxd,final H2OCountedCompleter cmp){
final Key [] keys = new Key[n_folds];
H2OCallback callback = new H2OCallback() {
@Override public void callback(H2OCountedCompleter t) {
try{
GLMModel [] models = new GLMModel[keys.length];
// we got the xval models, now compute their validations...
for(int i = 0; i < models.length; ++i)models[i] = DKV.get(keys[i]).get();
new GLMXValidationTask(model,_lambdaIdx,models, cmp).dfork(_dinfo._adaptedFrame);
}catch(Throwable ex){cmp.completeExceptionally(ex);}
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){
cmp.completeExceptionally(ex);
return true;
}
};
callback.addToPendingCount(n_folds-1);
double proximal_penalty = 0;
for(int i = 0; i < n_folds; ++i)
new GLM2(this.description + " xval " + i, self(), keys[i] = Key.make(destination_key + "_" + _lambdaIdx + "_xval" + i), _dinfo.getFold(i, n_folds),_glm,new double[]{lambda[_lambdaIdx]},model.alpha,0, model.beta_eps,self(),model.norm_beta(lambdaIxd),proximal_penalty).
setCase(case_mode,case_val).
run(callback);
}
// Expand grid search related argument sets
@Override protected NanoHTTPD.Response serveGrid(NanoHTTPD server, Properties parms, RequestType type) {
return superServeGrid(server, parms, type);
}
public static final DecimalFormat AUC_DFORMAT = new DecimalFormat("#.###");
public static final String aucStr(double auc){
return AUC_DFORMAT.format(Math.round(1000*auc)*0.001);
}
public static final DecimalFormat AIC_DFORMAT = new DecimalFormat("###.###");
public static final String aicStr(double aic){
return AIC_DFORMAT.format(Math.round(1000*aic)*0.001);
}
public static final DecimalFormat DEV_EXPLAINED_DFORMAT = new DecimalFormat("#.###");
public static final String devExplainedStr(double dev){
return DEV_EXPLAINED_DFORMAT.format(Math.round(1000*dev)*0.001);
}
public static class GLMGrid extends Iced {
static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.
final Key _jobKey;
final long _startTime;
@API(help="mean of response in the training dataset")
final Key [] destination_keys;
final double [] _alphas;
// final Comparator<GLMModel> _cmp;
// public GLMGrid (Key [] keys, double [] alphas){
// this(keys,alphas,null);
// }
public GLMGrid (Key jobKey, GLM2 [] jobs){
_jobKey = jobKey;
_alphas = new double [jobs.length];
destination_keys = new Key[jobs.length];
for(int i = 0; i < jobs.length; ++i){
destination_keys[i] = jobs[i].destination_key;
_alphas[i] = jobs[i].alpha[0];
}
_startTime = System.currentTimeMillis();
}
}
public static class GLMGridSearch extends Job {
public final int _maxParallelism;
transient private AtomicInteger _idx;
public final GLM2 [] _jobs;
public GLMGridSearch(int maxP, Key jobKey, Key dstKey, DataInfo dinfo, GLMParams glm, double [] lambdas, double [] alphas, int nfolds, double betaEpsilon){
super(jobKey, dstKey);
description = "GLM Grid with params " + glm.toString() + "on data " + dinfo.toString() ;
_maxParallelism = maxP;
_jobs = new GLM2[alphas.length];
_idx = new AtomicInteger(_maxParallelism);
for(int i = 0; i < _jobs.length; ++i) _jobs[i] = new GLM2("GLM grid(" + i + ")",self(),Key.make(dstKey.toString() + "_" + i),dinfo,glm,lambdas,alphas[i], nfolds, betaEpsilon,self());
}
@Override public float progress(){
float sum = 0f;
for(GLM2 g:_jobs)sum += g.progress();
return sum/_jobs.length;
}
@Override
public Job fork(){
DKV.put(destination_key, new GLMGrid(self(),_jobs));
assert _maxParallelism >= 1;
final H2OCountedCompleter fjt = new JobCompleter(this);
fjt.setPendingCount(_jobs.length-1);
start(fjt);
for(int i = 0; i < Math.min(_jobs.length,_maxParallelism); ++i){
_jobs[i].run(new H2OCallback(fjt) {
@Override public void callback(H2OCountedCompleter t) {
int nextJob = _idx.getAndIncrement();
if(nextJob < _jobs.length){
_jobs[nextJob].run(clone());
}
}
});
}
return this;
}
@Override public Response redirect() {
String n = GridSearchProgress.class.getSimpleName();
return Response.redirect( this, n, "job_key", job_key, "destination_key", destination_key);
}
}
public boolean isDone(){return DKV.get(self()) == null;}
}
|
src/main/java/hex/glm/GLM2.java
|
package hex.glm;
import hex.FrameTask.DataInfo;
import hex.GridSearch.GridSearchProgress;
import hex.glm.GLMModel.GLMValidationTask;
import hex.glm.GLMModel.GLMXValidationTask;
import hex.glm.GLMParams.CaseMode;
import hex.glm.GLMParams.Family;
import hex.glm.GLMParams.Link;
import hex.glm.GLMTask.GLMIterationTask;
import hex.glm.GLMTask.LMAXTask;
import hex.glm.GLMTask.YMUTask;
import hex.glm.LSMSolver.ADMMSolver;
import java.text.DecimalFormat;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import jsr166y.CountedCompleter;
import water.*;
import water.H2O.H2OCallback;
import water.H2O.H2OCountedCompleter;
import water.H2O.JobCompleter;
import water.Job.ModelJob;
import water.api.DocGen;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.*;
public class GLM2 extends ModelJob {
public final String _jobName;
// private transient GLM2 [] _subjobs;
// private Key _parentjob;
@API(help = "max-iterations", filter = Default.class, lmin=1, lmax=1000000)
int max_iter = 50;
@API(help = "If true, data will be standardized on the fly when computing the model.", filter = Default.class)
boolean standardize = true;
@API(help = "validation folds", filter = Default.class, lmin=0, lmax=100)
int n_folds;
@API(help = "Family.", filter = Default.class)
Family family = Family.gaussian;
private DataInfo _dinfo;
private GLMParams _glm;
private double [] _wgiven;
private double _proximalPenalty;
private double [] _beta;
private boolean _runAllLambdas = true;
// @API(help = "Link.", filter = Default.class)
Link link = Link.identity;
@API(help = "CaseMode", filter = Default.class)
CaseMode case_mode = CaseMode.none;
@API(help = "CaseMode", filter = Default.class)
double case_val = 0;
@API(help = "Tweedie variance power", filter = Default.class)
double tweedie_variance_power;
double tweedie_link_power;
@API(help = "alpha", filter = Default.class)
double [] alpha = new double[]{0.5};
// @API(help = "lambda", filter = RSeq2.class)
@API(help = "lambda", filter = Default.class)
double [] lambda;// = new double[]{1e-5};
public static final double DEFAULT_BETA_EPS = 1e-4;
@API(help = "beta_eps", filter = Default.class)
double beta_epsilon = DEFAULT_BETA_EPS;
int _lambdaIdx = 0;
@Override public Key defaultDestKey(){
return null;
}
@Override public Key defaultJobKey() {return null;}
public GLM2() {_jobName = "";}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda){
this(desc,jobKey,dest,dinfo,glm,lambda,0.5,0);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,0);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds){
this(desc,jobKey,dest,dinfo,glm,lambda,0.5,nfolds,DEFAULT_BETA_EPS);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha,int nfolds, double betaEpsilon){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,betaEpsilon,null);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds, double betaEpsilon, Key parentJob){
this(desc,jobKey,dest,dinfo,glm,lambda,alpha,nfolds,betaEpsilon,parentJob, null,0);
}
public GLM2(String desc, Key jobKey, Key dest, DataInfo dinfo, GLMParams glm, double [] lambda, double alpha, int nfolds, double betaEpsilon, Key parentJob, double [] beta, double proximalPenalty) {
assert beta == null || beta.length == (dinfo.fullN()+1):"unexpected size of beta, got length " + beta.length + ", expected " + dinfo.fullN();
job_key = jobKey;
description = desc;
destination_key = dest;
this.beta_epsilon = betaEpsilon;
_beta = beta;
_dinfo = dinfo;
_glm = glm;
this.lambda = lambda;
_beta = beta;
if((_proximalPenalty = proximalPenalty) != 0)
_wgiven = beta;
this.alpha= new double[]{alpha};
this.n_folds = nfolds;
source = dinfo._adaptedFrame;
response = dinfo._adaptedFrame.lastVec();
_jobName = dest.toString() + ((nfolds > 1)?("[" + dinfo._foldId + "]"):"");
}
static String arrayToString (double[] arr) {
if (arr == null) {
return "(null)";
}
StringBuffer sb = new StringBuffer();
for (int i = 0; i < arr.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append(arr[i]);
}
return sb.toString();
}
@Override protected void logStart() {
Log.info("Starting GLM2 model build...");
super.logStart();
Log.info(" max_iter: ", max_iter);
Log.info(" standardize: ", standardize);
Log.info(" n_folds: ", n_folds);
Log.info(" family: ", family);
Log.info(" wgiven: " + arrayToString(_wgiven));
Log.info(" proximalPenalty: " + _proximalPenalty);
Log.info(" runAllLambdas: " + _runAllLambdas);
Log.info(" link: " + link);
Log.info(" case_mode: " + case_mode);
Log.info(" case_val: " + case_val);
Log.info(" tweedie_variance_power: " + tweedie_variance_power);
Log.info(" tweedie_link_power: " + tweedie_link_power);
Log.info(" alpha: " + arrayToString(alpha));
Log.info(" lambda: " + arrayToString(lambda));
Log.info(" beta_epsilon: " + beta_epsilon);
Log.info(" description: " + description);
}
public GLM2 setCase(CaseMode cm, double cv){
case_mode = cm;
case_val = cv;
return this;
}
/** Return the query link to this page */
public static String link(Key k, String content) {
RString rs = new RString("<a href='GLM2.query?source=%$key'>%content</a>");
rs.replace("key", k.toString());
rs.replace("content", content);
return rs.toString();
}
public static Job gridSearch(Key jobKey, Key destinationKey, DataInfo dinfo, GLMParams glm, double [] lambda, double [] alpha, int nfolds){
return gridSearch(jobKey, destinationKey, dinfo, glm, lambda, alpha,nfolds,DEFAULT_BETA_EPS);
}
public static Job gridSearch(Key jobKey, Key destinationKey, DataInfo dinfo, GLMParams glm, double [] lambda, double [] alpha, int nfolds, double betaEpsilon){
return new GLMGridSearch(4, jobKey, destinationKey,dinfo,glm,lambda,alpha, nfolds,betaEpsilon).fork();
}
@Override protected Response serve() {
init();
link = family.defaultLink;// TODO
tweedie_link_power = 1 - tweedie_variance_power;// TODO
Frame fr = new Frame(source._names.clone(),source.vecs().clone());
fr.remove(ignored_cols);
final Vec [] vecs = fr.vecs();
ArrayList<Integer> constantOrNAs = new ArrayList<Integer>();
for(int i = 0; i < vecs.length-1; ++i)// put response to the end
if(vecs[i] == response){
fr.add(fr._names[i], fr.remove(i));
break;
}
for(int i = 0; i < vecs.length-1; ++i) // remove constant cols and cols with too many NAs
if(vecs[i].min() == vecs[i].max() || vecs[i].naCnt() > vecs[i].length()*0.2)constantOrNAs.add(i);
if(!constantOrNAs.isEmpty()){
int [] cols = new int[constantOrNAs.size()];
for(int i = 0; i < cols.length; ++i)cols[i] = constantOrNAs.get(i);
fr.remove(cols);
}
_dinfo = new DataInfo(fr, 1, standardize);
_glm = new GLMParams(family, tweedie_variance_power, link, tweedie_link_power);
if(alpha.length > 1) { // grid search
if(destination_key == null)destination_key = Key.make("GLMGridModel_"+Key.make());
if(job_key == null)job_key = Key.make("GLMGridJob_"+Key.make());
Job j = gridSearch(self(),destination_key, _dinfo, _glm, lambda, alpha,n_folds);
return GLMGridView.redirect(this,j.destination_key);
} else {
if(destination_key == null)destination_key = Key.make("GLMModel_"+Key.make());
if(job_key == null)job_key = Key.make("GLM2Job_"+Key.make());
fork();
// return GLMModelView.redirect(this, dest(),job_key);
return GLMProgress.redirect(this,job_key, dest());
}
}
private static double beta_diff(double[] b1, double[] b2) {
if(b1 == null)return Double.MAX_VALUE;
double res = Math.abs(b1[0] - b2[0]);
for( int i = 1; i < b1.length; ++i )
res = Math.max(res, Math.abs(b1[i] - b2[i]));
return res;
}
@Override public float progress(){
if(DKV.get(dest()) == null)return 0;
GLMModel m = DKV.get(dest()).get();
float progress = (float)m.iteration()/(float)max_iter; // TODO, do something smarter here
System.out.println("glm progress = " + progress);
return progress;
}
private class Iteration extends H2OCallback<GLMIterationTask> {
LSMSolver _solver;
final DataInfo _dinfo;
final H2OCountedCompleter _fjt;
// Key _modelKey;
GLMModel _model; // latest model
// GLMModel _oldModel; // model 1 round back (the one being validated)
public Iteration(GLMModel model, LSMSolver solver, DataInfo dinfo, H2OCountedCompleter fjt){
// _modelKey = modelKey;
// _oldModel = model;
_model = model;
_solver = solver;
_dinfo = dinfo;
_fjt = fjt;
}
@Override public Iteration clone(){return new Iteration(_model,_solver,_dinfo,_fjt);}
@Override public void callback(final GLMIterationTask glmt) {
if(isRunning(self())){
double [] newBeta = MemoryManager.malloc8d(glmt._xy.length);
double [] newBetaDeNorm = null;
_solver.solve(glmt._gram, glmt._xy, glmt._yy, newBeta);
final boolean diverged = Utils.hasNaNsOrInfs(newBeta);
if(diverged)
newBeta = glmt._beta == null?newBeta:glmt._beta;
if(_dinfo._standardize) {
newBetaDeNorm = newBeta.clone();
double norm = 0.0; // Reverse any normalization on the intercept
// denormalize only the numeric coefs (categoricals are not normalized)
final int numoff = newBeta.length - _dinfo._nums - 1;
for( int i=numoff; i< newBeta.length-1; i++ ) {
double b = newBetaDeNorm[i]*_dinfo._normMul[i-numoff];
norm += b*_dinfo._normSub[i-numoff]; // Also accumulate the intercept adjustment
newBetaDeNorm[i] = b;
}
newBetaDeNorm[newBetaDeNorm.length-1] -= norm;
}
boolean done = false;
// _model = _oldModel.clone();
done = done || _glm.family == Family.gaussian || (glmt._iter+1) == max_iter || beta_diff(glmt._beta, newBeta) < beta_epsilon || !isRunning(self());
_model.setLambdaSubmodel(_lambdaIdx,newBetaDeNorm == null?newBeta:newBetaDeNorm, newBetaDeNorm==null?null:newBeta, glmt._iter+1);
if(done){
H2OCallback fin = new H2OCallback<GLMValidationTask>() {
@Override public void callback(GLMValidationTask tsk) {
boolean improved = _model.setAndTestValidation(_lambdaIdx,tsk._res);
if(!diverged && (improved || _runAllLambdas) && _lambdaIdx < (lambda.length-1) ){ // continue with next lambda value?
_model.update(self());
_solver = new ADMMSolver(lambda[++_lambdaIdx],alpha[0]);
_solver._jobKey = self();
_solver._id = _jobName;
glmt._val = null;
Iteration.this.callback(glmt);
} else { // nope, we're done
_model.unlock(self());
_fjt.tryComplete(); // signal we're done to anyone waiting for the job
}
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter cc){
_fjt.completeExceptionally(ex);
return true;
}
};
if(GLM2.this.n_folds >= 2) xvalidate(_model, _lambdaIdx, fin);
else new GLMValidationTask(_model,_lambdaIdx,fin).dfork(_dinfo._adaptedFrame);
} else {
if(glmt._val != null){
glmt._val.finalize_AIC_AUC();
_model.setAndTestValidation(_lambdaIdx,glmt._val);//.store();
_model.update(self());
}
int iter = glmt._iter+1;
GLMIterationTask nextIter = new GLMIterationTask(GLM2.this, _dinfo,glmt._glm, case_mode, case_val, newBeta,iter,glmt._ymu,glmt._reg);
nextIter.setCompleter(new Iteration(_model, _solver, _dinfo, _fjt)); // we need to clone here as FJT will set status to done after this method
nextIter.dfork(_dinfo._adaptedFrame);
}
} else throw new JobCancelledException();
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){
_fjt.completeExceptionally(ex);
return false;
}
}
@Override
public GLM2 fork(){
start(new JobCompleter(this));
run();
return this;
}
// start inside of parent job
public void run(final H2OCountedCompleter fjt){
assert GLM2.this._fjtask == null;
GLM2.this._fjtask = fjt;
run();
}
public void run(){
logStart();
assert alpha.length == 1;
new YMUTask(this, _dinfo, case_mode, case_val, new H2OCallback<YMUTask>() {
@Override public void callback(final YMUTask ymut){
if(ymut._ymin == ymut._ymax){
String msg = case_mode == CaseMode.none
?"Attempting to run GLM on column with constant value = " + ymut._ymin
:"Attempting to run GLM on column with constant value, y " + case_mode + " " + case_val + " is " + (ymut._ymin == 0?"false":"true") + " for all rows!";
GLM2.this.cancel(msg);
GLM2.this._fjtask.completeExceptionally(new JobCancelledException(msg));
}
new LMAXTask(GLM2.this, _dinfo, _glm, ymut.ymu(),alpha[0],new H2OCallback<LMAXTask>(){
@Override public void callback(LMAXTask t){
final double lmax = t.lmax();
if(lambda == null){
lambda = new double[]{lmax,lmax*0.9,lmax*0.75,lmax*0.66,lmax*0.5,lmax*0.33,lmax*0.25,lmax*1e-1,lmax*1e-2,lmax*1e-3,lmax*1e-4,lmax*1e-5,lmax*1e-6,lmax*1e-7,lmax*1e-8}; // todo - make it a sequence of 100 lambdas
_runAllLambdas = false;
}
else {
int i = 0; while(i < lambda.length && lambda[i] > lmax)++i;
if(i > 0)lambda = i == lambda.length?new double[]{lmax}:Arrays.copyOfRange(lambda, i, lambda.length);
}
GLMIterationTask firstIter = new GLMIterationTask(GLM2.this,_dinfo,_glm,case_mode, case_val, _beta,0,ymut.ymu(),1.0/ymut.nobs());
final ADMMSolver solver = new ADMMSolver(lambda[0], alpha[0]);
solver._proximalPenalty = _proximalPenalty;
solver._wgiven = _wgiven;
solver._jobKey = self();
solver._id = _jobName;
GLMModel model = new GLMModel(self(),dest(),_dinfo, _glm,beta_epsilon,alpha[0],lambda,ymut.ymu(),GLM2.this.case_mode,GLM2.this.case_val);
model.delete_and_lock(self());
firstIter.setCompleter(new Iteration(model,solver,_dinfo,GLM2.this._fjtask));
firstIter.dfork(_dinfo._adaptedFrame);
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter cc){
if(GLM2.this._fjtask != null)GLM2.this._fjtask.completeExceptionally(ex);
return true;
}
}).dfork(_dinfo._adaptedFrame);
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter cc){
if(GLM2.this._fjtask != null)GLM2.this._fjtask.completeExceptionally(ex);
return true;
}
}).dfork(_dinfo._adaptedFrame);
}
private void xvalidate(final GLMModel model, int lambdaIxd,final H2OCountedCompleter cmp){
final Key [] keys = new Key[n_folds];
H2OCallback callback = new H2OCallback() {
@Override public void callback(H2OCountedCompleter t) {
try{
GLMModel [] models = new GLMModel[keys.length];
// we got the xval models, now compute their validations...
for(int i = 0; i < models.length; ++i)models[i] = DKV.get(keys[i]).get();
new GLMXValidationTask(model,_lambdaIdx,models, cmp).dfork(_dinfo._adaptedFrame);
}catch(Throwable ex){cmp.completeExceptionally(ex);}
}
@Override public boolean onExceptionalCompletion(Throwable ex, CountedCompleter caller){
cmp.completeExceptionally(ex);
return true;
}
};
callback.addToPendingCount(n_folds-1);
double proximal_penalty = 0;
for(int i = 0; i < n_folds; ++i)
new GLM2(this.description + " xval " + i, self(), keys[i] = Key.make(destination_key + "_" + _lambdaIdx + "_xval" + i), _dinfo.getFold(i, n_folds),_glm,new double[]{lambda[_lambdaIdx]},model.alpha,0, model.beta_eps,self(),model.norm_beta(lambdaIxd),proximal_penalty).
setCase(case_mode,case_val).
run(callback);
}
// Expand grid search related argument sets
@Override protected NanoHTTPD.Response serveGrid(NanoHTTPD server, Properties parms, RequestType type) {
return superServeGrid(server, parms, type);
}
public static final DecimalFormat AUC_DFORMAT = new DecimalFormat("#.###");
public static final String aucStr(double auc){
return AUC_DFORMAT.format(Math.round(1000*auc)*0.001);
}
public static final DecimalFormat AIC_DFORMAT = new DecimalFormat("###.###");
public static final String aicStr(double aic){
return AIC_DFORMAT.format(Math.round(1000*aic)*0.001);
}
public static final DecimalFormat DEV_EXPLAINED_DFORMAT = new DecimalFormat("#.###");
public static final String devExplainedStr(double dev){
return DEV_EXPLAINED_DFORMAT.format(Math.round(1000*dev)*0.001);
}
public static class GLMGrid extends Iced {
static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.
final Key _jobKey;
final long _startTime;
@API(help="mean of response in the training dataset")
final Key [] destination_keys;
final double [] _alphas;
// final Comparator<GLMModel> _cmp;
// public GLMGrid (Key [] keys, double [] alphas){
// this(keys,alphas,null);
// }
public GLMGrid (Key jobKey, GLM2 [] jobs){
_jobKey = jobKey;
_alphas = new double [jobs.length];
destination_keys = new Key[jobs.length];
for(int i = 0; i < jobs.length; ++i){
destination_keys[i] = jobs[i].destination_key;
_alphas[i] = jobs[i].alpha[0];
}
_startTime = System.currentTimeMillis();
}
}
public static class GLMGridSearch extends Job {
public final int _maxParallelism;
transient private AtomicInteger _idx;
public final GLM2 [] _jobs;
public GLMGridSearch(int maxP, Key jobKey, Key dstKey, DataInfo dinfo, GLMParams glm, double [] lambdas, double [] alphas, int nfolds, double betaEpsilon){
super(jobKey, dstKey);
description = "GLM Grid with params " + glm.toString() + "on data " + dinfo.toString() ;
_maxParallelism = maxP;
_jobs = new GLM2[alphas.length];
_idx = new AtomicInteger(_maxParallelism);
for(int i = 0; i < _jobs.length; ++i) _jobs[i] = new GLM2("GLM grid(" + i + ")",self(),Key.make(dstKey.toString() + "_" + i),dinfo,glm,lambdas,alphas[i], nfolds, betaEpsilon,self());
}
@Override public float progress(){
float sum = 0f;
for(GLM2 g:_jobs)sum += g.progress();
return sum/_jobs.length;
}
@Override
public Job fork(){
DKV.put(destination_key, new GLMGrid(self(),_jobs));
assert _maxParallelism >= 1;
final H2OCountedCompleter fjt = new JobCompleter(this);
fjt.setPendingCount(_jobs.length-1);
start(fjt);
for(int i = 0; i < Math.min(_jobs.length,_maxParallelism); ++i){
_jobs[i].run(new H2OCallback(fjt) {
@Override public void callback(H2OCountedCompleter t) {
int nextJob = _idx.getAndIncrement();
if(nextJob < _jobs.length){
_jobs[nextJob].run(clone());
}
}
});
}
return this;
}
@Override public Response redirect() {
String n = GridSearchProgress.class.getSimpleName();
return Response.redirect( this, n, "job_key", job_key, "destination_key", destination_key);
}
}
public boolean isDone(){return DKV.get(self()) == null;}
}
|
Removed debug print in GLM2.
|
src/main/java/hex/glm/GLM2.java
|
Removed debug print in GLM2.
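For reference, the only functional difference between the two GLM2.java versions above is the System.out.println call inside progress(). A minimal, self-contained sketch of what the method still computes after the change; the iteration and maxIter fields below are hypothetical stand-ins for the model state GLM2 reads back from the DKV, so this is an illustration rather than the H2O code itself:

public class ProgressSketch {
    private final int maxIter = 50;   // stands in for GLM2.max_iter
    private int iteration = 10;       // stands in for GLMModel.iteration()

    public float progress() {
        // the removed debug line used to print this value to stdout
        return (float) iteration / (float) maxIter;
    }

    public static void main(String[] args) {
        System.out.println(new ProgressSketch().progress()); // prints 0.2
    }
}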
|
|
Java
|
apache-2.0
|
fd81196fd492d23585d155da2c8ab124d748dd44
| 0
|
ftldb/ftldb,ftldb/ftldb
|
/*
* Copyright 2014-2016 Victor Osolovskiy, Sergey Navrotskiy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ftldb;
import freemarker.template.Configuration;
import freemarker.template.TemplateException;
import freemarker.template.Version;
import java.beans.ExceptionListener;
import java.beans.XMLDecoder;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* This class sets up the FreeMarker configuration as a singleton.
*/
public class Configurator {
// The current configuration.
private static Configuration config;
/**
* Getter for the inner {@link Configuration} static field.
*
* @return the current configuration
*/
public static synchronized Configuration getConfiguration() {
ensureConfigurationIsSet();
return config;
}
/**
* Setter for the inner {@link Configuration} static field.
*
* @param config the new configuration
*/
public static synchronized void setConfiguration(Configuration config) {
Configurator.config = config;
}
/**
* Creates a new {@link Configuration} instance from a JavaBean serialized with {@link java.beans.XMLEncoder}.
*
* @param configXMLInputStream XML binary stream
* @return a new instance
*/
public static Configuration newConfiguration(InputStream configXMLInputStream) {
XMLDecoder decoder = new XMLDecoder(configXMLInputStream, null, new ExceptionListener() {
public void exceptionThrown(Exception e) {
throw ((e instanceof RuntimeException) && !(e instanceof ArrayIndexOutOfBoundsException))
? (RuntimeException) e
: new RuntimeException(e);
}
});
Configuration cfg = null;
for (;;) {
try {
Object obj = decoder.readObject();
if (obj instanceof Configuration) {
cfg = (Configuration) obj;
}
} catch (ArrayIndexOutOfBoundsException e) {
break;
}
}
if (cfg == null) {
throw new RuntimeException("Provided XML contains no " + Configuration.class.getName() + " objects");
}
decoder.close();
return cfg;
}
/**
* Instantiates a new {@link Configuration} object from a JavaBean serialized with {@link java.beans.XMLEncoder} and
* sets it as the current configuration.
*
* @param configXMLInputStream the new configuration as an XML binary stream
*/
public static void setConfiguration(InputStream configXMLInputStream) {
setConfiguration(newConfiguration(configXMLInputStream));
}
/**
* The convenience method for {@link #setConfiguration(InputStream)}.
*
* @param configXMLString the new configuration as an XML string
*/
public static void setConfiguration(String configXMLString) {
setConfiguration(new ByteArrayInputStream(configXMLString.getBytes()));
}
/**
* Sets the specified setting in the current configuration with the specified value.
*
* @param name the setting name
* @param value the setting value
* @throws TemplateException if the setting cannot be set
*/
public static synchronized void setConfigurationSetting(String name, String value) throws TemplateException {
ensureConfigurationIsSet();
config.setSetting(name, value);
}
/**
* Drops the current configuration. In order to continue, the configuration must be re-set.
*/
public static synchronized void dropConfiguration() {
config = null;
}
private static void ensureConfigurationIsSet() {
if (config == null) {
throw new NullPointerException("FTLDB configuration is not initialized");
}
}
private static final String VERSION_PROPERTY_PATH = "ftldb/version.properties";
private static final String VERSION_PROPERTY_NAME = "version";
private static Version VERSION;
/**
* Returns FTLDB version as a {@link Version} instance.
*
* @return FTLDB version
*/
public static Version getVersion() {
if (VERSION == null) {
VERSION = readVersion();
}
return VERSION;
}
private static Version readVersion() {
try {
Properties vp = new Properties();
InputStream ins = Configurator.class.getClassLoader().getResourceAsStream(VERSION_PROPERTY_PATH);
if (ins == null) {
throw new RuntimeException("FTLDB version file is missing: " + VERSION_PROPERTY_PATH);
} else {
try {
vp.load(ins);
} finally {
ins.close();
}
String versionString = vp.getProperty(VERSION_PROPERTY_NAME);
if (versionString == null) {
throw new RuntimeException("FTLDB version file is corrupt: \"" + VERSION_PROPERTY_NAME
+ "\" property is missing.");
}
return new Version(versionString);
}
} catch (IOException e) {
throw new RuntimeException("Failed to load and parse " + VERSION_PROPERTY_PATH, e);
}
}
/**
* Returns FreeMarker version as a {@link Version} instance.
*
* @return FreeMarker version
*/
public static Version getFreeMarkerVersion() {
return Configuration.getVersion();
}
}
|
ftldb/src/main/java/ftldb/Configurator.java
|
/*
* Copyright 2014-2016 Victor Osolovskiy, Sergey Navrotskiy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ftldb;
import freemarker.template.Configuration;
import freemarker.template.TemplateException;
import freemarker.template.Version;
import java.beans.ExceptionListener;
import java.beans.XMLDecoder;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* This class sets up the FreeMarker configuration as a singleton.
*/
public class Configurator {
// The current configuration.
private static Configuration config;
/**
* Getter for the inner {@link Configuration} static field.
*
* @return the current configuration
*/
public static synchronized Configuration getConfiguration() {
ensureConfigurationIsSet();
return config;
}
/**
* Setter for the inner {@link Configuration} static field.
*
* @param config the new configuration
*/
public static synchronized void setConfiguration(Configuration config) {
Configurator.config = config;
}
/**
* Creates a new {@link Configuration} instance from a JavaBean serialized with {@link java.beans.XMLEncoder}.
*
* @param configXMLInputStream XML binary stream
* @return a new instance
*/
public static Configuration newConfiguration(InputStream configXMLInputStream) {
XMLDecoder decoder = new XMLDecoder(configXMLInputStream, null, new ExceptionListener() {
public void exceptionThrown(Exception e) {
throw ((e instanceof RuntimeException) && !(e instanceof ArrayIndexOutOfBoundsException))
? (RuntimeException) e
: new RuntimeException(e);
}
});
Object obj;
do {
try {
obj = decoder.readObject();
} catch (ArrayIndexOutOfBoundsException e) {
obj = null;
}
if (obj == null) {
throw new RuntimeException("Provided XML contains no " + Configuration.class.getName() + " objects");
}
} while (!(obj instanceof Configuration));
decoder.close();
return (Configuration) obj;
}
/**
* Instantiates a new {@link Configuration} object from a JavaBean serialized with {@link java.beans.XMLEncoder} and
* sets it as the current configuration.
*
* @param configXMLInputStream the new configuration as an XML binary stream
*/
public static void setConfiguration(InputStream configXMLInputStream) {
setConfiguration(newConfiguration(configXMLInputStream));
}
/**
* The convenience method for {@link #setConfiguration(InputStream)}.
*
* @param configXMLString the new configuration as an XML string
*/
public static void setConfiguration(String configXMLString) {
setConfiguration(new ByteArrayInputStream(configXMLString.getBytes()));
}
/**
* Sets the specified setting in the current configuration with the specified value.
*
* @param name the setting name
* @param value the setting value
* @throws TemplateException if the setting cannot be set
*/
public static synchronized void setConfigurationSetting(String name, String value) throws TemplateException {
ensureConfigurationIsSet();
config.setSetting(name, value);
}
/**
* Drops the current configuration. In order to continue, the configuration must be re-set.
*/
public static synchronized void dropConfiguration() {
config = null;
}
private static void ensureConfigurationIsSet() {
if (config == null) {
throw new NullPointerException("FTLDB configuration is not initialized");
}
}
private static final String VERSION_PROPERTY_PATH = "ftldb/version.properties";
private static final String VERSION_PROPERTY_NAME = "version";
private static Version VERSION;
/**
* Returns FTLDB version as a {@link Version} instance.
*
* @return FTLDB version
*/
public static Version getVersion() {
if (VERSION == null) {
VERSION = readVersion();
}
return VERSION;
}
private static Version readVersion() {
try {
Properties vp = new Properties();
InputStream ins = Configurator.class.getClassLoader().getResourceAsStream(VERSION_PROPERTY_PATH);
if (ins == null) {
throw new RuntimeException("FTLDB version file is missing: " + VERSION_PROPERTY_PATH);
} else {
try {
vp.load(ins);
} finally {
ins.close();
}
String versionString = vp.getProperty(VERSION_PROPERTY_NAME);
if (versionString == null) {
throw new RuntimeException("FTLDB version file is corrupt: \"" + VERSION_PROPERTY_NAME
+ "\" property is missing.");
}
return new Version(versionString);
}
} catch (IOException e) {
throw new RuntimeException("Failed to load and parse " + VERSION_PROPERTY_PATH, e);
}
}
/**
* Returns FreeMarker version as a {@link Version} instance.
*
* @return FreeMarker version
*/
public static Version getFreeMarkerVersion() {
return Configuration.getVersion();
}
}
|
Fixed issue #37.
|
ftldb/src/main/java/ftldb/Configurator.java
|
Fixed issue #37.
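The text of issue #37 is not included in this record; what the diff above shows is that newConfiguration now scans the entire XML stream, keeps the last Configuration bean it encounters, and only fails after the stream is exhausted, whereas the old loop returned the first Configuration (or threw as soon as the decoder ran dry). End of input is detected through the ArrayIndexOutOfBoundsException that java.beans.XMLDecoder.readObject throws when no more objects remain. A self-contained sketch of that read loop, with java.lang.Integer standing in for the FreeMarker Configuration bean:

import java.beans.XMLDecoder;
import java.beans.XMLEncoder;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;

public class DecoderLoopSketch {
    public static void main(String[] args) {
        // Encode a few beans, the same way a Configuration bean would be written by XMLEncoder.
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        try (XMLEncoder enc = new XMLEncoder(buf)) {
            enc.writeObject("unrelated bean");
            enc.writeObject(Integer.valueOf(1));
            enc.writeObject(Integer.valueOf(2));
        }
        Integer found = null;
        try (XMLDecoder dec = new XMLDecoder(new ByteArrayInputStream(buf.toByteArray()))) {
            for (;;) {
                try {
                    Object obj = dec.readObject();
                    if (obj instanceof Integer) {
                        found = (Integer) obj; // keep the last match, as newConfiguration now does
                    }
                } catch (ArrayIndexOutOfBoundsException e) {
                    break; // XMLDecoder signals "no more objects" with this exception
                }
            }
        }
        System.out.println(found); // prints 2, the last matching bean in the stream
    }
}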
|
|
Java
|
apache-2.0
|
0299a87e683a42dcad3d805ee46a510840e2dee9
| 0
|
lstephen/ootp-ai,lstephen/ootp-ai,lstephen/ootp-ai,lstephen/ootp-ai,lstephen/ootp-ai
|
package com.github.lstephen.ootp.ai.roster;
import com.github.lstephen.ootp.ai.io.Printable;
import com.github.lstephen.ootp.ai.player.Player;
import com.github.lstephen.ootp.ai.roster.Roster.Status;
import java.io.PrintWriter;
import java.util.Set;
import com.google.common.base.Function;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
/**
*
* @author lstephen
*/
public class RosterChanges implements Printable {
private final Set<RosterChange> changes = Sets.newHashSet();
public void addChange(Player player, Status from, Status to) {
changes.add(new RosterChange(player, from, to));
}
@Override
public void print(PrintWriter w) {
w.println();
for (RosterChange c : RosterChange.ordering().sortedCopy(changes)) {
c.println(w);
}
}
private static class RosterChange {
private final Player player;
private final Status from;
private final Status to;
public RosterChange(Player player, Status from, Status to) {
this.player = player;
this.from = from;
this.to = to;
}
public void println(PrintWriter w) {
w.println(
String.format(
"%4s -> %-4s %2s %s",
from == null ? "" : from,
to == null ? "" : to,
player.getListPosition().or(""),
player.getName()));
}
public static Ordering<RosterChange> ordering() {
return Ordering
.natural()
.nullsLast()
.onResultOf((RosterChange c) -> c.from)
.compound(Ordering
.natural()
.nullsLast()
.onResultOf(c -> c.to));
}
}
}
|
src/main/java/com/github/lstephen/ootp/ai/roster/RosterChanges.java
|
package com.github.lstephen.ootp.ai.roster;
import com.github.lstephen.ootp.ai.io.Printable;
import com.github.lstephen.ootp.ai.player.Player;
import com.github.lstephen.ootp.ai.roster.Roster.Status;
import java.io.PrintWriter;
import java.util.Set;
import com.google.common.base.Function;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
/**
*
* @author lstephen
*/
public class RosterChanges implements Printable {
private final Set<RosterChange> changes = Sets.newHashSet();
public void addChange(Player player, Status from, Status to) {
changes.add(new RosterChange(player, from, to));
}
@Override
public void print(PrintWriter w) {
w.println();
for (RosterChange c : RosterChange.ordering().sortedCopy(changes)) {
c.println(w);
}
}
private static class RosterChange {
private final Player player;
private final Status from;
private final Status to;
public RosterChange(Player player, Status from, Status to) {
this.player = player;
this.from = from;
this.to = to;
}
public void println(PrintWriter w) {
w.println(
String.format(
"%4s -> %-4s %2s %s",
from == null ? "" : from,
to == null ? "" : to,
player.getPosition(),
player.getShortName()));
}
public static Ordering<RosterChange> ordering() {
return Ordering
.natural()
.nullsLast()
.onResultOf((RosterChange c) -> c.from)
.compound(Ordering
.natural()
.nullsLast()
.onResultOf(c -> c.to));
}
}
}
|
Use full name in roster changes
|
src/main/java/com/github/lstephen/ootp/ai/roster/RosterChanges.java
|
Use full name in roster changes
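The change above only swaps player.getPosition() and player.getShortName() for player.getListPosition().or("") and player.getName(); the format string is untouched. A small, self-contained sketch of the line that format produces, using hypothetical values in place of the Player and Roster.Status fields:

public class RosterLineSketch {
    public static void main(String[] args) {
        // Hypothetical values; the real ones come from Roster.Status and Player.
        String from = "AAA", to = "ML", position = "CF", name = "John Smith";
        System.out.println(String.format("%4s -> %-4s %2s %s", from, to, position, name));
        // prints: " AAA -> ML   CF John Smith"
    }
}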
|
|
Java
|
apache-2.0
|
c9686687f7201f747c2de8af01a20d16b991f3a4
| 0
|
michel-kraemer/citeproc-java
|
package de.undercouch.citeproc;
import de.undercouch.citeproc.bibtex.BibTeXConverter;
import de.undercouch.citeproc.bibtex.BibTeXItemDataProvider;
import de.undercouch.citeproc.csl.CSLCitation;
import de.undercouch.citeproc.csl.CSLItemData;
import de.undercouch.citeproc.output.Bibliography;
import de.undercouch.citeproc.output.Citation;
import org.jbibtex.BibTeXDatabase;
import org.jbibtex.ParseException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.yaml.snakeyaml.Yaml;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assume.assumeFalse;
@RunWith(Parameterized.class)
public class FixturesTest {
private static final String FIXTURES_DIR = "/fixtures";
private static final Map<String, ItemDataProvider> bibliographyFileCache = new HashMap<>();
/**
* {@code true} if the test should be run in experimental mode
*/
private boolean experimentalMode;
/**
* The output format to generate
*/
private String outputFormat;
/**
* The expected rendered result
*/
private String expectedResult;
/**
* The test data
*/
private Map<String, Object> data;
/**
* Get all test files
*/
@Parameterized.Parameters(name = "{0}, {1}, {2}")
@SuppressWarnings("unchecked")
public static Iterable<Object[]> data() {
URL fixturesUrl = CSL.class.getResource(FIXTURES_DIR);
File fixturesDir = new File(fixturesUrl.getPath());
// noinspection ConstantConditions
return Arrays.stream(fixturesDir.listFiles((dir, name) -> name.endsWith(".yaml")))
.flatMap(f -> {
Map<String, Object> data;
Yaml yaml = new Yaml();
try (FileInputStream is = new FileInputStream(f)) {
data = yaml.loadAs(is, Map.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
Object expectedResultObj = data.get("result");
if (expectedResultObj instanceof String) {
String str = (String)expectedResultObj;
Map<String, String> map = new HashMap<>();
map.put("text", str);
expectedResultObj = map;
}
Map<String, String> expectedResults = (Map<String, String>)expectedResultObj;
return Stream.of(true, false).flatMap(experimentalMode ->
expectedResults.entrySet().stream().map(expectedResult ->
new Object[] {
f.getName().substring(0, f.getName().length() - 5),
experimentalMode,
expectedResult.getKey(),
expectedResult.getValue(),
data
}
)
);
})
.collect(Collectors.toList());
}
/**
* Create a new test
* @param name the name of the test file
* @param experimentalMode {@code true} if the test should be run in
* experimental mode
* @param outputFormat the output format to generate
* @param expectedResult the expected rendered result
* @param data the test data
*/
public FixturesTest(@SuppressWarnings("unused") String name, boolean experimentalMode,
String outputFormat, String expectedResult, Map<String, Object> data) {
this.experimentalMode = experimentalMode;
this.outputFormat = outputFormat;
this.expectedResult = expectedResult;
this.data = data;
}
private static ItemDataProvider loadBibliographyFile(String filename) throws IOException {
ItemDataProvider result = bibliographyFileCache.get(filename);
if (result == null) {
BibTeXDatabase db;
try (InputStream is = FixturesTest.class.getResourceAsStream(filename);
BufferedInputStream bis = new BufferedInputStream(is)) {
InputStream tis = bis;
if (filename.endsWith(".gz")) {
tis = new GZIPInputStream(bis);
}
db = new BibTeXConverter().loadDatabase(tis);
} catch (ParseException e) {
throw new IOException(e);
}
BibTeXItemDataProvider r = new BibTeXItemDataProvider();
r.addDatabase(db);
result = r;
bibliographyFileCache.put(filename, result);
}
return result;
}
/**
* Run a test from the test suite
* @throws IOException if an I/O error occurred
*/
@Test
@SuppressWarnings("unchecked")
public void run() throws IOException {
String mode = (String)data.get("mode");
String style = (String)data.get("style");
String experimentalModeEnabled = (String)data.get("experimentalMode");
assumeFalse("only".equals(experimentalModeEnabled) && !experimentalMode);
// get bibliography file
ItemDataProvider itemDataProvider = null;
String bibliographyFile = (String)data.get("bibliographyFile");
if (bibliographyFile != null) {
itemDataProvider = loadBibliographyFile(bibliographyFile);
}
// get item data
List<Map<String, Object>> rawItems = (List<Map<String, Object>>)data.get("items");
if (rawItems != null && bibliographyFile != null) {
throw new IllegalStateException("Found both `bibliographyFile' " +
"and `items'. Define only one of them.");
}
// convert item data
if (rawItems != null) {
CSLItemData[] items = new CSLItemData[rawItems.size()];
for (int i = 0; i < items.length; ++i) {
items[i] = CSLItemData.fromJson(rawItems.get(i));
}
itemDataProvider = new ListItemDataProvider(items);
}
if (itemDataProvider == null) {
throw new IllegalStateException("Either `bibliographyFile' or " +
"`items' must be specified.");
}
// get the item IDs to test against
List<String> itemIdsList = (List<String>)data.get("itemIds");
String[] itemIds;
if (itemIdsList == null) {
itemIds = itemDataProvider.getIds();
} else {
itemIds = itemIdsList.toArray(new String[0]);
}
// get the raw citations
List<Map<String, Object>> rawCitations = (List<Map<String, Object>>)data.get("citations");
if (rawCitations != null && !"citation".equals(mode)) {
throw new IllegalStateException("`citations' can only be defined " +
"if `mode' equals `citation'.");
}
if (rawCitations != null && itemIdsList != null) {
throw new IllegalStateException("Found both `itemIds' and " +
"`citations'. Define only one of them.");
}
// convert citations
List<CSLCitation> citations = null;
if (rawCitations != null) {
citations = new ArrayList<>();
for (Map<String, Object> raw : rawCitations) {
citations.add(CSLCitation.fromJson(raw));
}
}
// create CSL processor
CSL citeproc = new CSL(itemDataProvider, style, experimentalMode);
citeproc.setOutputFormat(outputFormat);
// register citation items
citeproc.registerCitationItems(itemIds);
String actualResult;
if ("bibliography".equals(mode)) {
Bibliography bibl = citeproc.makeBibliography();
actualResult = bibl.makeString();
} else if ("citation".equals(mode)) {
List<Citation> generatedCitations = new ArrayList<>();
if (citations != null) {
for (CSLCitation c : citations) {
generatedCitations.addAll(citeproc.makeCitation(c));
}
} else {
generatedCitations.addAll(citeproc.makeCitation(itemIds));
}
actualResult = generatedCitations.stream()
.map(Citation::getText)
.collect(Collectors.joining("\n"));
} else {
throw new IllegalStateException("Unknown mode: " + mode);
}
// compare result
assertEquals(expectedResult, actualResult);
}
}
|
citeproc-java/src/test/java/de/undercouch/citeproc/FixturesTest.java
|
package de.undercouch.citeproc;
import de.undercouch.citeproc.bibtex.BibTeXConverter;
import de.undercouch.citeproc.bibtex.BibTeXItemDataProvider;
import de.undercouch.citeproc.csl.CSLItemData;
import de.undercouch.citeproc.output.Bibliography;
import de.undercouch.citeproc.output.Citation;
import org.jbibtex.BibTeXDatabase;
import org.jbibtex.ParseException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.yaml.snakeyaml.Yaml;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.zip.GZIPInputStream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assume.assumeFalse;
@RunWith(Parameterized.class)
public class FixturesTest {
private static final String FIXTURES_DIR = "/fixtures";
private static final Map<String, ItemDataProvider> bibliographyFileCache = new HashMap<>();
/**
* {@code true} if the test should be run in experimental mode
*/
private boolean experimentalMode;
/**
* The output format to generate
*/
private String outputFormat;
/**
* The expected rendered result
*/
private String expectedResult;
/**
* The test data
*/
private Map<String, Object> data;
/**
* Get all test files
*/
@Parameterized.Parameters(name = "{0}, {1}, {2}")
@SuppressWarnings("unchecked")
public static Iterable<Object[]> data() {
URL fixturesUrl = CSL.class.getResource(FIXTURES_DIR);
File fixturesDir = new File(fixturesUrl.getPath());
// noinspection ConstantConditions
return Arrays.stream(fixturesDir.listFiles((dir, name) -> name.endsWith(".yaml")))
.flatMap(f -> {
Map<String, Object> data;
Yaml yaml = new Yaml();
try (FileInputStream is = new FileInputStream(f)) {
data = yaml.loadAs(is, Map.class);
} catch (IOException e) {
throw new RuntimeException(e);
}
Object expectedResultObj = data.get("result");
if (expectedResultObj instanceof String) {
String str = (String)expectedResultObj;
Map<String, String> map = new HashMap<>();
map.put("text", str);
expectedResultObj = map;
}
Map<String, String> expectedResults = (Map<String, String>)expectedResultObj;
return Stream.of(true, false).flatMap(experimentalMode ->
expectedResults.entrySet().stream().map(expectedResult ->
new Object[] {
f.getName().substring(0, f.getName().length() - 5),
experimentalMode,
expectedResult.getKey(),
expectedResult.getValue(),
data
}
)
);
})
.collect(Collectors.toList());
}
/**
* Create a new test
* @param name the name of the test file
* @param experimentalMode {@code true} if the test should be run in
* experimental mode
* @param outputFormat the output format to generate
* @param expectedResult the expected rendered result
* @param data the test data
*/
public FixturesTest(@SuppressWarnings("unused") String name, boolean experimentalMode,
String outputFormat, String expectedResult, Map<String, Object> data) {
this.experimentalMode = experimentalMode;
this.outputFormat = outputFormat;
this.expectedResult = expectedResult;
this.data = data;
}
private static ItemDataProvider loadBibliographyFile(String filename) throws IOException {
ItemDataProvider result = bibliographyFileCache.get(filename);
if (result == null) {
BibTeXDatabase db;
try (InputStream is = FixturesTest.class.getResourceAsStream(filename);
BufferedInputStream bis = new BufferedInputStream(is)) {
InputStream tis = bis;
if (filename.endsWith(".gz")) {
tis = new GZIPInputStream(bis);
}
db = new BibTeXConverter().loadDatabase(tis);
} catch (ParseException e) {
throw new IOException(e);
}
BibTeXItemDataProvider r = new BibTeXItemDataProvider();
r.addDatabase(db);
result = r;
bibliographyFileCache.put(filename, result);
}
return result;
}
/**
* Run a test from the test suite
* @throws IOException if an I/O error occurred
*/
@Test
@SuppressWarnings("unchecked")
public void run() throws IOException {
String mode = (String)data.get("mode");
String style = (String)data.get("style");
String experimentalModeEnabled = (String)data.get("experimentalMode");
assumeFalse("only".equals(experimentalModeEnabled) && !experimentalMode);
ItemDataProvider itemDataProvider;
String bibliographyFile = (String)data.get("bibliographyFile");
if (bibliographyFile != null) {
itemDataProvider = loadBibliographyFile(bibliographyFile);
} else {
// convert item data
List<Map<String, Object>> rawItems = (List<Map<String, Object>>)data.get("items");
CSLItemData[] items = new CSLItemData[rawItems.size()];
for (int i = 0; i < items.length; ++i) {
items[i] = CSLItemData.fromJson(rawItems.get(i));
}
itemDataProvider = new ListItemDataProvider(items);
}
List<String> itemIdsList = (List<String>)data.get("itemIds");
String[] itemIds;
if (itemIdsList == null) {
itemIds = itemDataProvider.getIds();
} else {
itemIds = itemIdsList.toArray(new String[0]);
}
// create CSL processor
CSL citeproc = new CSL(itemDataProvider, style, experimentalMode);
citeproc.setOutputFormat(outputFormat);
// register citation items
citeproc.registerCitationItems(itemIds);
String actualResult;
if ("bibliography".equals(mode)) {
Bibliography bibl = citeproc.makeBibliography();
actualResult = bibl.makeString();
} else if ("citation".equals(mode)) {
List<Citation> citations = citeproc.makeCitation(itemIds);
actualResult = citations.stream()
.map(Citation::getText)
.collect(Collectors.joining("\n"));
} else {
throw new IllegalStateException("Unknown mode: " + mode);
}
// compare result
assertEquals(expectedResult, actualResult);
}
}
|
Support citation items in fixtures test
|
citeproc-java/src/test/java/de/undercouch/citeproc/FixturesTest.java
|
Support citation items in fixtures test
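With the updated FixturesTest above, a fixture may define a citations list (only meaningful in citation mode, and mutually exclusive with itemIds); each raw map is converted with CSLCitation.fromJson and passed to CSL.makeCitation. A minimal sketch of that conversion step, reusing only the calls visible in the test; the contents of the fixture maps themselves are not shown in this record and are left to the caller:

import de.undercouch.citeproc.csl.CSLCitation;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class CitationConversionSketch {
    // Mirrors the loop added in FixturesTest.run(): one CSLCitation per raw fixture map.
    static List<CSLCitation> toCitations(List<Map<String, Object>> rawCitations) {
        List<CSLCitation> citations = new ArrayList<>();
        for (Map<String, Object> raw : rawCitations) {
            citations.add(CSLCitation.fromJson(raw));
        }
        return citations;
    }
}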
|
|
Java
|
apache-2.0
|
eb74e510e05533956b01e81e60050e65b7999a5d
| 0
|
eyal-lezmy/Android-DataLib
|
/*
* Copyright (C) 2012 Eyal LEZMY (http://www.eyal.fr)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.eyal.lib.data.processor;
import java.util.ArrayList;
import org.apache.http.Header;
import android.content.Context;
import fr.eyal.lib.data.communication.rest.AndroidHttpClient;
import fr.eyal.lib.data.communication.rest.HttpDelete;
import fr.eyal.lib.data.communication.rest.HttpGet;
import fr.eyal.lib.data.communication.rest.HttpHead;
import fr.eyal.lib.data.communication.rest.HttpPost;
import fr.eyal.lib.data.communication.rest.HttpPut;
import fr.eyal.lib.data.communication.rest.HttpRequest;
import fr.eyal.lib.data.communication.rest.HttpResponse;
import fr.eyal.lib.data.model.ResponseBusinessObject;
import fr.eyal.lib.data.parser.GenericHandler;
import fr.eyal.lib.data.parser.GenericParser;
import fr.eyal.lib.data.service.model.BusinessResponse;
import fr.eyal.lib.data.service.model.DataLibRequest;
import fr.eyal.lib.data.service.model.DataLibResponse;
import fr.eyal.lib.util.Out;
/**
* @author Eyal LEZMY
*/
public class Processor {
private static final String CONNECTIVITY_ERROR_MESSAGE = "Network is not accessible.";
private static final String TAG = "Processor";
protected ProcessorResponseHandler mHandler;
protected ArrayList<Header> mHeaders;
public Processor(final ProcessorResponseHandler handler, final ArrayList<Header> cookies) {
mHandler = handler;
mHeaders = cookies;
}
/**
* Start to reach and process the data to get a {@link ResponseBusinessObject}
*
* @param request the request information
* @param isConnected define is the device can access to the network
* @param handler the object the will handle the parsing
* @param context
*/
public void start(final DataLibRequest request, final boolean isConnected, final GenericHandler handler, final Context context) {
Out.d(TAG, "startProcessor " + isConnected);
//we create the response to send at the end of this function
final DataLibResponse response = new DataLibResponse();
response.request = request;
//if the device have an active network connection
if (isConnected) {
try {
//we send the request on the network and get the result
final HttpResponse result = doNetwork(request, context);
//we clear the cookie manager id needed
if (!request.isConservingTheCookies())
AndroidHttpClient.flushCookieManager();
response.returnCode = result.getStatus();
ResponseBusinessObject businessObjectNetwork = null;
//we parse the result to get a BusinessResponse
final GenericParser parser = new GenericParser(handler);
parser.parseSheet(result.getBody(), request.parseType);
//we finish to fill the BusinessObject and save it
businessObjectNetwork = handler.getParsedData();
//if we have to save the result
if (request.isDatabaseCacheEnabled())
businessObjectNetwork.save(request);
//we build the response to return
response.headers = result.getHeaders();
response.status = BusinessResponse.STATUS_OK;
response.response = businessObjectNetwork;
} catch (final Exception e) {
e.printStackTrace();
response.status = BusinessResponse.STATUS_ERROR;
response.statusMessage = e.getMessage();
}
} else {
response.status = BusinessResponse.STATUS_ERROR;
response.statusMessage = CONNECTIVITY_ERROR_MESSAGE;
}
//we return the response
mHandler.handleProcessorResponse(response);
}
/**
* Processes the network request. This method implements the REST, SOAP, or other network processing.
* For now, only REST requests are supported.
*
* @param request the request to send
* @param context the context of execution
*
* @return the {@link HttpResponse} returned by the remote server
*/
public HttpResponse doNetwork(final DataLibRequest request, final Context context) {
//TODO manage other kinds of network requests (SOAP, ...)
AndroidHttpClient httpClient = new AndroidHttpClient(request.url);
HttpRequest httpRequest = null;
//we create the request, depending on the method
switch (request.requestMethod) {
case DataLibRequest.HTTP_REST_DELETE:
httpRequest = new HttpDelete(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_GET:
httpRequest = new HttpGet(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_HEAD:
httpRequest = new HttpHead(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_POST:
if (request.data != null && request.data.length > 0)
httpRequest = new HttpPost(request.path, request.params, request.contentType, request.data);
else
httpRequest = new HttpPost(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_PUT:
if (request.data != null && request.data.length > 0)
httpRequest = new HttpPut(request.path, request.params, request.contentType, request.data);
else
httpRequest = new HttpPut(request.path, request.params);
break;
default:
httpRequest = new HttpGet(request.path, request.params);
break;
}
HttpResponse httpResponse = httpClient.execute(httpRequest);
Out.d(TAG, "" + httpResponse.getBodyAsString());
return httpResponse;
}
}
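The doNetwork() method above dispatches on the request method constant to build the matching HTTP request object, falling back to GET when the method is unknown. A minimal, self-contained sketch of the same dispatch pattern, written against the standard java.net.http client rather than the DataLib REST classes (the method constants and the URL below are illustrative assumptions, not part of the library):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class MethodDispatchSketch {

    // Illustrative constants, mirroring DataLibRequest.HTTP_REST_* in spirit only.
    static final int GET = 0, POST = 1, PUT = 2, DELETE = 3, HEAD = 4;

    // Build a request object depending on the method, like doNetwork() does.
    static HttpRequest build(int method, URI uri, byte[] body) {
        HttpRequest.Builder b = HttpRequest.newBuilder(uri);
        switch (method) {
            case DELETE: return b.DELETE().build();
            case HEAD:   return b.method("HEAD", HttpRequest.BodyPublishers.noBody()).build();
            case POST:   return b.POST(body != null && body.length > 0
                                 ? HttpRequest.BodyPublishers.ofByteArray(body)
                                 : HttpRequest.BodyPublishers.noBody()).build();
            case PUT:    return b.PUT(body != null && body.length > 0
                                 ? HttpRequest.BodyPublishers.ofByteArray(body)
                                 : HttpRequest.BodyPublishers.noBody()).build();
            default:     return b.GET().build(); // GET is the fallback, as in doNetwork()
        }
    }

    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = build(GET, URI.create("https://example.org/"), null);
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode());
    }
}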
|
Android-DataLib-Library/src/fr/eyal/lib/data/processor/Processor.java
|
/*
* Copyright (C) 2012 Eyal LEZMY (http://www.eyal.fr)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.eyal.lib.data.processor;
import java.util.ArrayList;
import org.apache.http.Header;
import android.content.Context;
import fr.eyal.lib.data.communication.rest.AndroidHttpClient;
import fr.eyal.lib.data.communication.rest.HttpDelete;
import fr.eyal.lib.data.communication.rest.HttpGet;
import fr.eyal.lib.data.communication.rest.HttpHead;
import fr.eyal.lib.data.communication.rest.HttpPost;
import fr.eyal.lib.data.communication.rest.HttpPut;
import fr.eyal.lib.data.communication.rest.HttpRequest;
import fr.eyal.lib.data.communication.rest.HttpResponse;
import fr.eyal.lib.data.model.ResponseBusinessObject;
import fr.eyal.lib.data.parser.GenericHandler;
import fr.eyal.lib.data.parser.GenericParser;
import fr.eyal.lib.data.service.model.BusinessResponse;
import fr.eyal.lib.data.service.model.DataLibRequest;
import fr.eyal.lib.data.service.model.DataLibResponse;
import fr.eyal.lib.util.Out;
/**
* @author Eyal LEZMY
*/
public class Processor {
private static final String CONNECTIVITY_ERROR_MESSAGE = "Network is not accessible.";
private static final String TAG = "Processor";
protected ProcessorResponseHandler mHandler;
protected ArrayList<Header> mHeaders;
public Processor(final ProcessorResponseHandler handler, final ArrayList<Header> cookies) {
mHandler = handler;
mHeaders = cookies;
}
/**
* Starts fetching and processing the data to get a {@link ResponseBusinessObject}
*
* @param request the request information
* @param isConnected defines whether the device can access the network
* @param handler the object that will handle the parsing
* @param context the execution context
*/
public void start(final DataLibRequest request, final boolean isConnected, final GenericHandler handler, final Context context) {
Out.d(TAG, "startProcessor " + isConnected);
//we create the response to send at the end of this function
final DataLibResponse response = new DataLibResponse();
response.request = request;
//if the device has an active network connection
if (isConnected) {
try {
//we send the request on the network and get the result
final HttpResponse result = doNetwork(request, context);
//we clear the cookie manager if needed
if (!request.isConservingTheCookies())
AndroidHttpClient.flushCookieManager();
response.returnCode = result.getStatus();
ResponseBusinessObject businessObjectNetwork = null;
//we parse the result to get a BusinessResponse
final GenericParser parser = new GenericParser(handler);
parser.parseSheet(result.getBody(), request.parseType);
//we finish filling the BusinessObject and save it
businessObjectNetwork = handler.getParsedData();
//if we have to save the result
if (request.isDatabaseCacheEnabled())
businessObjectNetwork.save(request);
//we build the response to return
response.headers = result.getHeaders();
response.status = BusinessResponse.STATUS_OK;
response.response = businessObjectNetwork;
} catch (final Exception e) {
e.printStackTrace();
response.status = BusinessResponse.STATUS_ERROR;
response.statusMessage = e.getMessage();
}
} else {
response.status = BusinessResponse.STATUS_ERROR;
response.statusMessage = CONNECTIVITY_ERROR_MESSAGE;
}
//we return the response
mHandler.handleProcessorResponse(response);
}
/**
* Processes the network request. This method implements the REST, SOAP, or other network processing.
* For now, only REST requests are supported.
*
* @param request the request to send
* @param context the context of execution
*
* @return the {@link HttpResponse} returned by the remote server
*/
public HttpResponse doNetwork(final DataLibRequest request, final Context context) {
//TODO manage other kinds of network requests (SOAP, ...)
AndroidHttpClient httpClient = new AndroidHttpClient(request.url);
HttpRequest httpRequest = null;
//we create the request, depending on the method
switch (request.requestMethod) {
case DataLibRequest.HTTP_REST_DELETE:
httpRequest = new HttpDelete(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_GET:
httpRequest = new HttpGet(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_HEAD:
httpRequest = new HttpHead(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_POST:
if (request.data != null && request.data.length > 0)
httpRequest = new HttpPost(request.path, request.params, request.contentType, request.data);
else
httpRequest = new HttpPost(request.path, request.params);
break;
case DataLibRequest.HTTP_REST_PUT:
if (request.data != null && request.data.length > 0)
httpRequest = new HttpPut(request.path, request.params, request.contentType, request.data);
else
httpRequest = new HttpPut(request.path, request.params);
break;
default:
httpRequest = new HttpGet(request.path, request.params);
break;
}
HttpResponse httpResponse = httpClient.execute(httpRequest);
Out.d(TAG, httpResponse.getBodyAsString());
return httpResponse;
}
}
|
Small NullPointerException fix
|
Android-DataLib-Library/src/fr/eyal/lib/data/processor/Processor.java
|
Small NullPointerException fix
|
|
Java
|
apache-2.0
|
49c5b223825854882e05af5210056cf277eef265
| 0
|
mozvip/subtitles-finder
|
package com.github.mozvip.subtitles.providers;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Locale;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.github.mozvip.subtitles.model.VideoSource;
import okhttp3.Response;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.mozvip.subtitles.EpisodeSubtitlesFinder;
import com.github.mozvip.subtitles.MovieSubtitlesFinder;
import com.github.mozvip.subtitles.RemoteSubTitles;
import com.github.mozvip.subtitles.SubTitlesUtils;
import com.github.mozvip.subtitles.SubTitlesZip;
import com.github.mozvip.subtitles.SubtitlesFinder;
public class SousTitresEU extends SubtitlesFinder implements EpisodeSubtitlesFinder, MovieSubtitlesFinder {
private final static Logger LOGGER = LoggerFactory.getLogger(SousTitresEU.class);
private final static String ROOT_URL = "http://www.sous-titres.eu";
@Override
public RemoteSubTitles downloadEpisodeSubtitle(String showName, int season, int episode, String release, VideoSource source,
Locale locale) throws ExecutionException {
String seriesName = showName.toLowerCase();
// strip a trailing "(year)" suffix, e.g. "doctor who (2005)" -> "doctor who"
Matcher matcher = Pattern.compile("(.*)\\s+\\(((?:19|20)\\d{2})\\)").matcher(seriesName);
if (matcher.matches()) {
seriesName = matcher.group(1);
}
// build the series page URL from the normalized name
seriesName = seriesName.replace(' ', '_');
seriesName = seriesName.replaceAll("[\\(\\):\\.]", "");
String url = ROOT_URL + "/series/" + seriesName + ".html";
Document document;
Elements nodes;
RemoteSubTitles bestSubTitles;
try (Response response = get(url, null, 1, TimeUnit.DAYS).get()) {
if (response.code() == 404) {
LOGGER.warn(String.format("Couldn't find show %s", showName));
return null;
}
document = Jsoup.parse(response.body().string(), url);
} catch (InterruptedException | IOException e) {
throw new ExecutionException(e);
}
nodes = document.select("a > span.episodenum");
bestSubTitles = null;
for (Element node : nodes) {
// gets parent node (TR)
Element tableRow = node.parent();
Elements flagImageNodes = tableRow.select("img");
boolean hasLanguage = false;
for (Element flagImageNode : flagImageNodes) {
String lang = flagImageNode.attr("title");
if (StringUtils.equalsIgnoreCase(lang, locale.getLanguage())) {
hasLanguage = true;
break;
}
}
if (!hasLanguage) {
continue;
}
String text = node.text();
Element link = node.parent();
String href = link.absUrl("href");
if (SubTitlesUtils.isExactMatch(text, season, episode)) {
byte[] bytes = getBytes(href, url);
RemoteSubTitles currentRemoteSubTitles;
try {
currentRemoteSubTitles = SubTitlesZip.selectBestSubtitlesFromZip(this, bytes, release, source, locale);
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
continue;
}
if (currentRemoteSubTitles != null) {
if (bestSubTitles == null || currentRemoteSubTitles.getScore() > bestSubTitles.getScore()) {
bestSubTitles = currentRemoteSubTitles;
}
}
} else if (SubTitlesUtils.isSeasonMatch(text, season)) {
byte[] bytes = getBytes(href, url);
try {
RemoteSubTitles currentRemoteSubTitles = SubTitlesZip.selectBestSubtitlesFromZip(this, bytes, release, source, locale,
season, episode);
if (currentRemoteSubTitles != null) {
if (bestSubTitles == null || currentRemoteSubTitles.getScore() > bestSubTitles.getScore()) {
bestSubTitles = currentRemoteSubTitles;
}
}
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
continue;
}
}
}
return bestSubTitles;
}
@Override
public RemoteSubTitles downloadMovieSubtitles(String movieName, int year, String release, VideoSource videoSource, BigDecimal fps,
Locale locale) throws ExecutionException {
String url = String.format("%s/search.html?q=%s+%d", ROOT_URL, movieName, year);
Document document = getDocument(url, null, 1, TimeUnit.DAYS);
Elements nodes = document.select("li.exact > a > span.episodenum");
RemoteSubTitles bestSubTitles = null;
for (Element node : nodes) {
// gets parent node (TR)
Element tableRow = node.parent();
Elements flagImageNodes = tableRow.select("img");
boolean hasLanguage = false;
for (Element flagImageNode : flagImageNodes) {
String lang = flagImageNode.attr("title");
if (StringUtils.equalsIgnoreCase(lang, locale.getLanguage())) {
hasLanguage = true;
break;
}
}
if (!hasLanguage) {
continue;
}
Element link = node.parent();
String href = link.absUrl("href");
try {
byte[] bytes = getBytes(href, url);
RemoteSubTitles currentRemoteSubTitles = SubTitlesZip.selectBestSubtitlesFromZip(this, bytes, release, videoSource, locale);
if (currentRemoteSubTitles != null) {
if (bestSubTitles == null || currentRemoteSubTitles.getScore() > bestSubTitles.getScore()) {
bestSubTitles = currentRemoteSubTitles;
}
}
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
}
}
return bestSubTitles;
}
}
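Both download methods above share the same selection idea: normalize the show name into the site's URL slug, then walk the candidate links and keep the subtitle whose score is highest. A self-contained sketch of that pattern, using an illustrative ScoredSubtitle stand-in and sample data instead of the real RemoteSubTitles type:

import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BestSubtitleSketch {

    // Minimal stand-in for RemoteSubTitles: only the score matters here.
    record ScoredSubtitle(String name, int score) {}

    // Same normalization idea as downloadEpisodeSubtitle(): drop "(year)", lowercase, underscores.
    static String toSlug(String showName) {
        String name = showName.toLowerCase();
        Matcher m = Pattern.compile("(.*)\\s+\\(((?:19|20)\\d{2})\\)").matcher(name);
        if (m.matches()) {
            name = m.group(1);
        }
        return name.replace(' ', '_').replaceAll("[\\(\\):\\.]", "");
    }

    // Keep the candidate with the highest score, as the loops above do with bestSubTitles.
    static ScoredSubtitle best(List<ScoredSubtitle> candidates) {
        ScoredSubtitle best = null;
        for (ScoredSubtitle c : candidates) {
            if (c != null && (best == null || c.score() > best.score())) {
                best = c;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        System.out.println(toSlug("Doctor Who (2005)")); // doctor_who
        System.out.println(best(Arrays.asList(
                new ScoredSubtitle("release.A", 3),
                new ScoredSubtitle("release.B", 7))));
    }
}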
|
src/main/java/com/github/mozvip/subtitles/providers/SousTitresEU.java
|
package com.github.mozvip.subtitles.providers;
import java.io.IOException;
import java.math.BigDecimal;
import java.util.Locale;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.github.mozvip.subtitles.model.VideoSource;
import okhttp3.Response;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.mozvip.subtitles.EpisodeSubtitlesFinder;
import com.github.mozvip.subtitles.MovieSubtitlesFinder;
import com.github.mozvip.subtitles.RemoteSubTitles;
import com.github.mozvip.subtitles.SubTitlesUtils;
import com.github.mozvip.subtitles.SubTitlesZip;
import com.github.mozvip.subtitles.SubtitlesFinder;
public class SousTitresEU extends SubtitlesFinder implements EpisodeSubtitlesFinder, MovieSubtitlesFinder {
private final static Logger LOGGER = LoggerFactory.getLogger(SousTitresEU.class);
private final static String ROOT_URL = "http://www.sous-titres.eu";
@Override
public RemoteSubTitles downloadEpisodeSubtitle(String showName, int season, int episode, String release, VideoSource source,
Locale locale) throws ExecutionException {
String seriesName = showName.toLowerCase();
// strip a trailing "(year)" suffix, e.g. "doctor who (2005)" -> "doctor who"
Matcher matcher = Pattern.compile("(.*)\\s+\\(((?:19|20)\\d{2})\\)").matcher(seriesName);
if (matcher.matches()) {
seriesName = matcher.group(1);
}
// build the series page URL from the normalized name
seriesName = seriesName.replace(' ', '_');
seriesName = seriesName.replaceAll("[\\(\\):\\.]", "");
String url = ROOT_URL + "/series/" + seriesName + ".html";
Document document;
Elements nodes;
RemoteSubTitles bestSubTitles;
try (Response response = get(url, null, 1, TimeUnit.DAYS).get()) {
if (response.code() == 404) {
LOGGER.warn(String.format("Couldn't find show %s", showName));
return null;
}
document = Jsoup.parse(response.body().string(), url);
} catch (InterruptedException | IOException e) {
LOGGER.error(e.getMessage(), e);
return null;
}
nodes = document.select("a > span.episodenum");
bestSubTitles = null;
for (Element node : nodes) {
// gets parent node (TR)
Element tableRow = node.parent();
Elements flagImageNodes = tableRow.select("img");
boolean hasLanguage = false;
for (Element flagImageNode : flagImageNodes) {
String lang = flagImageNode.attr("title");
if (StringUtils.equalsIgnoreCase(lang, locale.getLanguage())) {
hasLanguage = true;
break;
}
}
if (!hasLanguage) {
continue;
}
String text = node.text();
Element link = node.parent();
String href = link.absUrl("href");
if (SubTitlesUtils.isExactMatch(text, season, episode)) {
byte[] bytes = getBytes(href, url);
RemoteSubTitles currentRemoteSubTitles;
try {
currentRemoteSubTitles = SubTitlesZip.selectBestSubtitlesFromZip(this, bytes, release, source, locale);
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
continue;
}
if (currentRemoteSubTitles != null) {
if (bestSubTitles == null || currentRemoteSubTitles.getScore() > bestSubTitles.getScore()) {
bestSubTitles = currentRemoteSubTitles;
}
}
} else if (SubTitlesUtils.isSeasonMatch(text, season)) {
byte[] bytes = getBytes(href, url);
try {
RemoteSubTitles currentRemoteSubTitles = SubTitlesZip.selectBestSubtitlesFromZip(this, bytes, release, source, locale,
season, episode);
if (currentRemoteSubTitles != null) {
if (bestSubTitles == null || currentRemoteSubTitles.getScore() > bestSubTitles.getScore()) {
bestSubTitles = currentRemoteSubTitles;
}
}
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
continue;
}
}
}
return bestSubTitles;
}
@Override
public RemoteSubTitles downloadMovieSubtitles(String movieName, int year, String release, VideoSource videoSource, BigDecimal fps,
Locale locale) throws ExecutionException {
String url = String.format("%s/search.html?q=%s+%d", ROOT_URL, movieName, year);
Document document = getDocument(url, null, 1, TimeUnit.DAYS);
Elements nodes = document.select("li.exact > a > span.episodenum");
RemoteSubTitles bestSubTitles = null;
for (Element node : nodes) {
// gets parent node (TR)
Element tableRow = node.parent();
Elements flagImageNodes = tableRow.select("img");
boolean hasLanguage = false;
for (Element flagImageNode : flagImageNodes) {
String lang = flagImageNode.attr("title");
if (StringUtils.equalsIgnoreCase(lang, locale.getLanguage())) {
hasLanguage = true;
break;
}
}
if (!hasLanguage) {
continue;
}
Element link = node.parent();
String href = link.absUrl("href");
try {
byte[] bytes = getBytes(href, url);
RemoteSubTitles currentRemoteSubTitles = SubTitlesZip.selectBestSubtitlesFromZip(this, bytes, release, videoSource, locale);
if (currentRemoteSubTitles != null) {
if (bestSubTitles == null || currentRemoteSubTitles.getScore() > bestSubTitles.getScore()) {
bestSubTitles = currentRemoteSubTitles;
}
}
} catch (IOException e) {
LOGGER.error(e.getMessage(), e);
}
}
return bestSubTitles;
}
}
|
Propagate exception
|
src/main/java/com/github/mozvip/subtitles/providers/SousTitresEU.java
|
Propagate exception
|
|
Java
|
apache-2.0
|
ddf9c47fbe6d2e154825ce8399ea44b88c3c4eec
| 0
|
TLoebner/JavaCalc,TLoebner/JavaCalc
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package calculator;
/**
*
* @author user
*/
public class NumericCalculus {
double[][] function1_points;
double[][] function2_points;
double[] function1_exp;
double[] function2_exp;
double[] error;
char[] function1_type;
char[] function2_type;
/*
public NumericCalculus(double[][] points)
{
//this.function1_points = points;
}
public NumericCalculus(double[] exp)
{
//this.function1_exp = exp;
}
public NumericCalculus(char[] type)
{
//this.function1_type = type;
}
*/
public void set_function1_points(double[][] points)
{
this.function1_points = points;
}
}
|
src/calculator/NumericCalculus.java
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package calculator;
/**
*
* @author user
*/
public class NumericCalculus {
double[][] function1_points;
double[][] function2_points;
double[] function1_exp;
double[] function2_exp;
double[] error;
char[] function1_type;
char[] function2_type;
/*
public NumericCalculus(double[][] points)
{
//this.function1_points = points;
}
public NumericCalculus(double[] exp)
{
//this.function1_exp = exp;
}
public NumericCalculus(char[] type)
{
//this.function1_type = type;
}
*/
}
|
first numa stuff #2
|
src/calculator/NumericCalculus.java
|
first numa stuff #2
|
|
Java
|
apache-2.0
|
e59c1c40894b01015aa3485039da02e2a80c18f6
| 0
|
RackerWilliams/xercesj,ronsigal/xerces,ronsigal/xerces,jimma/xerces,ronsigal/xerces,jimma/xerces,jimma/xerces,RackerWilliams/xercesj,RackerWilliams/xercesj
|
/*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.xerces.parsers;
import java.io.IOException;
import java.util.Hashtable;
import java.util.StringTokenizer;
import org.apache.xerces.dom.TextImpl;
import org.apache.xerces.framework.XMLAttrList;
import org.apache.xerces.framework.XMLContentSpecNode;
import org.apache.xerces.framework.XMLParser;
import org.apache.xerces.framework.XMLValidator;
import org.apache.xerces.readers.XMLEntityHandler;
import org.apache.xerces.utils.StringPool;
import org.apache.xerces.validators.schema.XUtil;
import org.apache.xerces.dom.DeferredDocumentImpl;
import org.apache.xerces.dom.DocumentImpl;
import org.apache.xerces.dom.DocumentTypeImpl;
import org.apache.xerces.dom.NodeImpl;
import org.apache.xerces.dom.EntityImpl;
import org.apache.xerces.dom.NotationImpl;
import org.apache.xerces.dom.ElementDefinitionImpl;
import org.apache.xerces.dom.AttrImpl;
import org.apache.xerces.dom.TextImpl;
import org.apache.xerces.dom.ElementImpl;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.Entity;
import org.w3c.dom.EntityReference;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
// REVISIT: Change use of AttributeList to Attributes. -Ac
import org.xml.sax.AttributeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.SAXParseException;
/**
* DOMParser provides a parser which produces a W3C DOM tree as its output
*
* @version
*/
public class DOMParser
extends XMLParser
{
//
// Constants
//
// public
/** Default programmatic document class name (org.apache.xerces.dom.DocumentImpl). */
public static final String DEFAULT_DOCUMENT_CLASS_NAME = "org.apache.xerces.dom.DocumentImpl";
// debugging
/** Set to true to debug attribute list declaration calls. */
private static final boolean DEBUG_ATTLIST_DECL = false;
// features and properties
/** Features recognized by this parser. */
private static final String RECOGNIZED_FEATURES[] = {
// SAX2 core features
// Xerces features
"http://apache.org/xml/features/dom/defer-node-expansion",
"http://apache.org/xml/features/dom/create-entity-ref-nodes",
"http://apache.org/xml/features/dom/include-ignorable-whitespace",
// Experimental features
"http://apache.org/xml/features/domx/grammar-access",
};
/** Properties recognized by this parser. */
private static final String RECOGNIZED_PROPERTIES[] = {
// SAX2 core properties
// Xerces properties
"http://apache.org/xml/properties/dom/document-class-name",
"http://apache.org/xml/properties/dom/current-element-node",
};
/** For experimental grammar access. */
private static final Hashtable TYPES = new Hashtable();
//
// Data
//
// common data
protected Document fDocument;
// deferred expansion data
protected DeferredDocumentImpl fDeferredDocumentImpl;
protected int fDocumentIndex;
protected int fDocumentTypeIndex;
protected int fCurrentNodeIndex;
// full expansion data
protected DocumentImpl fDocumentImpl;
protected DocumentType fDocumentType;
protected Node fCurrentElementNode;
// state
protected boolean fWithinElement;
protected boolean fInCDATA;
// features
private boolean fGrammarAccess;
// properties
// REVISIT: Even though these have setters and getters, should they
// be protected visibility? -Ac
private String fDocumentClassName;
private boolean fDeferNodeExpansion;
private boolean fCreateEntityReferenceNodes;
private boolean fIncludeIgnorableWhitespace;
// built-in entities
protected int fAmpIndex;
protected int fLtIndex;
protected int fGtIndex;
protected int fAposIndex;
protected int fQuotIndex;
private boolean fSeenRootElement;
private XMLAttrList fAttrList;
//
// Static initializer
//
static {
String types[][] = {
{ "CDATA", "minOccurs", "maxOccurs" },
{ "ENUMERATION", "collection", "order", "export" },
{ "NMTOKEN", "name", "ref" },
};
for (int i = 0; i < types.length; i++) {
String typeName = types[i][0];
for (int j = 1; j < types[i].length; j++) {
TYPES.put(types[i][j], typeName);
}
}
}
//
// Constructors
//
/** Default constructor. */
public DOMParser() {
// setup parser state
init();
// set default values
try {
setDocumentClassName(DEFAULT_DOCUMENT_CLASS_NAME);
setCreateEntityReferenceNodes(true);
setDeferNodeExpansion(true);
setIncludeIgnorableWhitespace(true);
} catch (SAXException e) {
throw new RuntimeException("PAR001 Fatal error constructing DOMParser.");
}
} // <init>()
//
// Public methods
//
// document
/** Returns the document. */
public Document getDocument() {
if (fDocumentImpl != null) {
fDocumentImpl.setErrorChecking(true);
}
return fDocument;
}
// features and properties
/**
* Returns a list of features that this parser recognizes.
* This method will never return null; if no features are
* recognized, this method will return a zero length array.
*
* @see #isFeatureRecognized
* @see #setFeature
* @see #getFeature
*/
public String[] getFeaturesRecognized() {
// get features that super/this recognizes
String superRecognized[] = super.getFeaturesRecognized();
String thisRecognized[] = RECOGNIZED_FEATURES;
// is one or the other the empty set?
int thisLength = thisRecognized.length;
if (thisLength == 0) {
return superRecognized;
}
int superLength = superRecognized.length;
if (superLength == 0) {
return thisRecognized;
}
// combine the two lists and return
String recognized[] = new String[superLength + thisLength];
System.arraycopy(superRecognized, 0, recognized, 0, superLength);
System.arraycopy(thisRecognized, 0, recognized, superLength, thisLength);
return recognized;
} // getFeaturesRecognized():String[]
/**
* Returns a list of properties that this parser recognizes.
* This method will never return null; if no properties are
* recognized, this method will return a zero length array.
*
* @see #isPropertyRecognized
* @see #setProperty
* @see #getProperty
*/
public String[] getPropertiesRecognized() {
// get properties that super/this recognizes
String superRecognized[] = super.getPropertiesRecognized();
String thisRecognized[] = RECOGNIZED_PROPERTIES;
// is one or the other the empty set?
int thisLength = thisRecognized.length;
if (thisLength == 0) {
return superRecognized;
}
int superLength = superRecognized.length;
if (superLength == 0) {
return thisRecognized;
}
// combine the two lists and return
String recognized[] = new String[superLength + thisLength];
System.arraycopy(superRecognized, 0, recognized, 0, superLength);
System.arraycopy(thisRecognized, 0, recognized, superLength, thisLength);
return recognized;
}
// resetting
/** Resets the parser. */
public void reset() throws Exception {
super.reset();
init();
}
/** Resets or copies the parser. */
public void resetOrCopy() throws Exception {
super.resetOrCopy();
init();
}
//
// Protected methods
//
// initialization
/**
* Initializes the parser to a pre-parse state. This method is
* called between calls to <code>parse()</code>.
*/
protected void init() {
// init common
fDocument = null;
// init deferred expansion
fDeferredDocumentImpl = null;
fDocumentIndex = -1;
fDocumentTypeIndex = -1;
fCurrentNodeIndex = -1;
// init full expansion
fDocumentImpl = null;
fDocumentType = null;
fCurrentElementNode = null;
// state
fWithinElement = false;
fInCDATA = false;
// built-in entities
fAmpIndex = fStringPool.addSymbol("amp");
fLtIndex = fStringPool.addSymbol("lt");
fGtIndex = fStringPool.addSymbol("gt");
fAposIndex = fStringPool.addSymbol("apos");
fQuotIndex = fStringPool.addSymbol("quot");
setSendCharDataAsCharArray(false);
fSeenRootElement = false;
fAttrList = new XMLAttrList(fStringPool);
} // init()
// features
/**
* This method sets whether the expansion of the nodes in the default
* DOM implementation is deferred.
*
* @see #getDeferNodeExpansion
* @see #setDocumentClassName
*/
protected void setDeferNodeExpansion(boolean deferNodeExpansion)
throws SAXNotRecognizedException, SAXNotSupportedException {
fDeferNodeExpansion = deferNodeExpansion;
}
/**
* Returns true if the expansion of the nodes in the default DOM
* implementation is deferred.
*
* @see #setDeferNodeExpansion
*/
protected boolean getDeferNodeExpansion()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fDeferNodeExpansion;
}
/**
* This feature determines whether entity references within
* the document are included in the document tree as
* EntityReference nodes.
* <p>
* Note: The children of the entity reference are always
* added to the document. This feature only affects
* whether an EntityReference node is also included
* as the parent of the entity reference children.
*
* @param create True to create entity reference nodes; false
* to only insert the entity reference children.
*
* @see #getCreateEntityReferenceNodes
*/
protected void setCreateEntityReferenceNodes(boolean create)
throws SAXNotRecognizedException, SAXNotSupportedException {
fCreateEntityReferenceNodes = create;
}
/**
* Returns true if entity references within the document are
* included in the document tree as EntityReference nodes.
*
* @see #setCreateEntityReferenceNodes
*/
public boolean getCreateEntityReferenceNodes()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fCreateEntityReferenceNodes;
}
/**
* This feature determines whether text nodes that can be
* considered "ignorable whitespace" are included in the DOM
* tree.
* <p>
* Note: The only way that the parser can determine if text
* is ignorable is by reading the associated grammar
* and having a content model for the document. When
* ignorable whitespace text nodes *are* included in
* the DOM tree, they will be flagged as ignorable.
* The ignorable flag can be queried by calling the
* TextImpl#isIgnorableWhitespace():boolean method.
*
* @param include True to include ignorable whitespace text nodes;
* false to not include ignorable whitespace text
* nodes.
*
* @see #getIncludeIgnorableWhitespace
*/
public void setIncludeIgnorableWhitespace(boolean include)
throws SAXNotRecognizedException, SAXNotSupportedException {
fIncludeIgnorableWhitespace = include;
}
/**
* Returns true if ignorable whitespace text nodes are included
* in the DOM tree.
*
* @see #setIncludeIgnorableWhitespace
*/
public boolean getIncludeIgnorableWhitespace()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fIncludeIgnorableWhitespace;
}
// properties
/**
* This method allows the programmer to decide which document
* factory to use when constructing the DOM tree. However, doing
* so will lose the functionality of the default factory. Also,
* a document class other than the default will lose the ability
* to defer node expansion on the DOM tree produced.
*
* @param documentClassName The fully qualified class name of the
* document factory to use when constructing
* the DOM tree.
*
* @see #getDocumentClassName
* @see #setDeferNodeExpansion
* @see #DEFAULT_DOCUMENT_CLASS_NAME
*/
protected void setDocumentClassName(String documentClassName)
throws SAXNotRecognizedException, SAXNotSupportedException {
// normalize class name
if (documentClassName == null) {
documentClassName = DEFAULT_DOCUMENT_CLASS_NAME;
}
// verify that this class exists and is of the right type
try {
Class _class = Class.forName(documentClassName);
//if (!_class.isAssignableFrom(Document.class)) {
if (!Document.class.isAssignableFrom(_class)) {
throw new IllegalArgumentException("PAR002 Class, \""+documentClassName+"\", is not of type org.w3c.dom.Document."+"\n"+documentClassName);
}
}
catch (ClassNotFoundException e) {
throw new IllegalArgumentException("PAR003 Class, \""+documentClassName+"\", not found."+"\n"+documentClassName);
}
// set document class name
fDocumentClassName = documentClassName;
if (!documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME)) {
setDeferNodeExpansion(false);
}
} // setDocumentClassName(String)
/**
* Returns the fully qualified class name of the document factory
* used when constructing the DOM tree.
*
* @see #setDocumentClassName
*/
protected String getDocumentClassName()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fDocumentClassName;
}
/**
* Returns the current element node.
* <p>
* Note: This method is not supported when the "deferNodeExpansion"
* property is set to true and the document factory is set to
* the default factory.
*/
protected Element getCurrentElementNode()
throws SAXNotRecognizedException, SAXNotSupportedException {
if (fCurrentElementNode != null &&
fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
return (Element)fCurrentElementNode;
}
return null;
} // getCurrentElementNode():Element
//
// Configurable methods
//
/**
* Set the state of any feature in a SAX2 parser. The parser
* might not recognize the feature, and if it does recognize
* it, it might not be able to fulfill the request.
*
* @param featureId The unique identifier (URI) of the feature.
* @param state The requested state of the feature (true or false).
*
* @exception SAXNotRecognizedException If the requested feature is
* not known.
* @exception SAXNotSupportedException If the requested feature is
* known, but the requested state
* is not supported.
*/
public void setFeature(String featureId, boolean state)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// SAX2 core features
//
if (featureId.startsWith(SAX2_FEATURES_PREFIX)) {
//
// No additional SAX properties defined for DOMParser.
// Pass request off to XMLParser for the common cases.
//
}
//
// Xerces features
//
else if (featureId.startsWith(XERCES_FEATURES_PREFIX)) {
String feature = featureId.substring(XERCES_FEATURES_PREFIX.length());
//
// http://apache.org/xml/features/dom/defer-node-expansion
// Allows the document tree returned by getDocument()
// to be constructed lazily. In other words, the DOM
// nodes are constructed as the tree is traversed.
// This allows the document to be returned sooner with
// the expense of holding all of the blocks of character
// data held in memory. Then again, lots of DOM nodes
// use a lot of memory as well.
//
if (feature.equals("dom/defer-node-expansion")) {
if (fParseInProgress) {
throw new SAXNotSupportedException("PAR004 Cannot setFeature("+featureId + "): parse is in progress."+"\n"+featureId);
}
setDeferNodeExpansion(state);
return;
}
//
// http://apache.org/xml/features/dom/create-entity-ref-nodes
// This feature determines whether entity references within
// the document are included in the document tree as
// EntityReference nodes.
// Note: The children of the entity reference are always
// added to the document. This feature only affects
// whether an EntityReference node is also included
// as the parent of the entity reference children.
//
if (feature.equals("dom/create-entity-ref-nodes")) {
setCreateEntityReferenceNodes(state);
return;
}
//
// http://apache.org/xml/features/dom/include-ignorable-whitespace
// This feature determines whether text nodes that can be
// considered "ignorable whitespace" are included in the DOM
// tree.
// Note: The only way that the parser can determine if text
// is ignorable is by reading the associated grammar
// and having a content model for the document. When
// ignorable whitespace text nodes *are* included in
// the DOM tree, they will be flagged as ignorable.
// The ignorable flag can be queried by calling the
// TextImpl#isIgnorableWhitespace():boolean method.
//
if (feature.equals("dom/include-ignorable-whitespace")) {
setIncludeIgnorableWhitespace(state);
return;
}
//
// Experimental features
//
//
// http://apache.org/xml/features/domx/grammar-access
// Allows grammar access in the DOM tree. Currently, this
// means that there is an XML Schema document tree as a
// child of the Doctype node.
//
if (feature.equals("domx/grammar-access")) {
fGrammarAccess = state;
return;
}
//
// Pass request off to XMLParser for the common cases.
//
}
//
// Pass request off to XMLParser for the common cases.
//
super.setFeature(featureId, state);
} // setFeature(String,boolean)
/**
* Query the current state of any feature in a SAX2 parser. The
* parser might not recognize the feature.
*
* @param featureId The unique identifier (URI) of the feature
* being set.
*
* @return The current state of the feature.
*
* @exception SAXNotRecognizedException If the requested feature is
* not known.
*/
public boolean getFeature(String featureId)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// SAX2 core features
//
if (featureId.startsWith(SAX2_FEATURES_PREFIX)) {
//
// No additional SAX properties defined for DOMParser.
// Pass request off to XMLParser for the common cases.
//
}
//
// Xerces features
//
else if (featureId.startsWith(XERCES_FEATURES_PREFIX)) {
String feature = featureId.substring(XERCES_FEATURES_PREFIX.length());
//
// http://apache.org/xml/features/dom/defer-node-expansion
// Allows the document tree returned by getDocument()
// to be constructed lazily. In other words, the DOM
// nodes are constructed as the tree is traversed.
// This allows the document to be returned sooner, at
// the expense of holding all of the blocks of character
// data in memory. Then again, lots of DOM nodes
// use a lot of memory as well.
//
if (feature.equals("dom/defer-node-expansion")) {
return getDeferNodeExpansion();
}
//
// http://apache.org/xml/features/dom/create-entity-ref-nodes
// This feature determines whether entity references within
// the document are included in the document tree as
// EntityReference nodes.
// Note: The children of the entity reference are always
// added to the document. This feature only affects
// whether an EntityReference node is also included
// as the parent of the entity reference children.
//
else if (feature.equals("dom/create-entity-ref-nodes")) {
return getCreateEntityReferenceNodes();
}
//
// http://apache.org/xml/features/dom/include-ignorable-whitespace
// This feature determines whether text nodes that can be
// considered "ignorable whitespace" are included in the DOM
// tree.
// Note: The only way that the parser can determine if text
// is ignorable is by reading the associated grammar
// and having a content model for the document. When
// ignorable whitespace text nodes *are* included in
// the DOM tree, they will be flagged as ignorable.
// The ignorable flag can be queried by calling the
// TextImpl#isIgnorableWhitespace():boolean method.
//
if (feature.equals("dom/include-ignorable-whitespace")) {
return getIncludeIgnorableWhitespace();
}
//
// Experimental features
//
//
// http://apache.org/xml/features/domx/grammar-access
// Allows grammar access in the DOM tree. Currently, this
// means that there is an XML Schema document tree as a
// child of the Doctype node.
//
if (feature.equals("domx/grammar-access")) {
return fGrammarAccess;
}
//
// Pass request off to XMLParser for the common cases.
//
}
//
// Pass request off to XMLParser for the common cases.
//
return super.getFeature(featureId);
} // getFeature(String):boolean
/**
* Set the value of any property in a SAX2 parser. The parser
* might not recognize the property, and if it does recognize
* it, it might not support the requested value.
*
* @param propertyId The unique identifier (URI) of the property
* being set.
* @param value The value to which the property is being set.
*
* @exception SAXNotRecognizedException If the requested property is
* not known.
* @exception SAXNotSupportedException If the requested property is
* known, but the requested
* value is not supported.
*/
public void setProperty(String propertyId, Object value)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// Xerces properties
//
if (propertyId.startsWith(XERCES_PROPERTIES_PREFIX)) {
String property = propertyId.substring(XERCES_PROPERTIES_PREFIX.length());
//
// http://apache.org/xml/properties/dom/current-element-node
// Returns the current element node as the DOM Parser is
// parsing. This property is useful for determining the
// relative location of the document when an error is
// encountered. Note that this feature does *not* work
// when the http://apache.org/xml/features/dom/defer-node-expansion
// is set to true.
//
if (property.equals("dom/current-element-node")) {
throw new SAXNotSupportedException("PAR005 Property, \""+propertyId+"\" is read-only.\n"+propertyId);
}
//
// http://apache.org/xml/properties/dom/document-class-name
// This property can be used to set/query the name of the
// document factory.
//
else if (property.equals("dom/document-class-name")) {
if (value != null && !(value instanceof String)) {
throw new SAXNotSupportedException("PAR006 Property value must be of type java.lang.String.");
}
setDocumentClassName((String)value);
return;
}
}
//
// Pass request off to XMLParser for the common cases.
//
super.setProperty(propertyId, value);
} // setProperty(String,Object)
/**
* Return the current value of a property in a SAX2 parser.
* The parser might not recognize the property.
*
* @param propertyId The unique identifier (URI) of the property
* being set.
*
* @return The current value of the property.
*
* @exception SAXNotRecognizedException If the requested property is
* not known.
*
* @see Configurable#getProperty
*/
public Object getProperty(String propertyId)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// Xerces properties
//
if (propertyId.startsWith(XERCES_PROPERTIES_PREFIX)) {
String property = propertyId.substring(XERCES_PROPERTIES_PREFIX.length());
//
// http://apache.org/xml/properties/dom/current-element-node
// Returns the current element node as the DOM Parser is
// parsing. This property is useful for determining the
// relative location of the document when an error is
// encountered. Note that this feature does *not* work
// when the http://apache.org/xml/features/dom/defer-node-expansion
// is set to true.
//
if (property.equals("dom/current-element-node")) {
boolean throwException = false;
try {
throwException = getFeature(XERCES_FEATURES_PREFIX+"dom/defer-node-expansion");
}
catch (SAXNotSupportedException e) {
// ignore
}
catch (SAXNotRecognizedException e) {
// ignore
}
if (throwException) {
throw new SAXNotSupportedException("PAR007 Current element node cannot be queried when node expansion is deferred.");
}
return getCurrentElementNode();
}
//
// http://apache.org/xml/properties/dom/document-class-name
// This property can be used to set/query the name of the
// document factory.
//
else if (property.equals("dom/document-class-name")) {
return getDocumentClassName();
}
}
//
// Pass request off to XMLParser for the common cases.
//
return super.getProperty(propertyId);
} // getProperty(String):Object
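//
// A minimal usage sketch for the features and properties documented above,
// assuming the inherited XMLParser.parse(InputSource) entry point and an
// InputSource named "source" (not shown in this file):
//
//   DOMParser parser = new DOMParser();
//   parser.setFeature("http://apache.org/xml/features/dom/defer-node-expansion", false);
//   parser.setFeature("http://apache.org/xml/features/dom/include-ignorable-whitespace", false);
//   parser.setProperty("http://apache.org/xml/properties/dom/document-class-name",
//                      "org.apache.xerces.dom.DocumentImpl");
//   parser.parse(source);
//   Document document = parser.getDocument();
//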
//
// XMLParser methods
//
/** Start document. */
public void startDocument(int versionIndex, int encodingIndex,
int standAloneIndex) {
// clean up unused strings
if (versionIndex != -1) {
fStringPool.orphanString(versionIndex);
}
if (encodingIndex != -1) {
fStringPool.orphanString(encodingIndex);
}
if (standAloneIndex != -1) {
fStringPool.orphanString(standAloneIndex);
}
// deferred expansion
String documentClassName = null;
try {
documentClassName = getDocumentClassName();
} catch (SAXException e) {
throw new RuntimeException("PAR008 Fatal error getting document factory.");
}
boolean deferNodeExpansion = true;
try {
deferNodeExpansion = getDeferNodeExpansion();
} catch (SAXException e) {
throw new RuntimeException("PAR009 Fatal error reading expansion mode.");
}
if (documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME) && deferNodeExpansion) {
boolean nsEnabled = false;
try { nsEnabled = getNamespaces(); }
catch (SAXException s) {}
fDocument = fDeferredDocumentImpl =
new DeferredDocumentImpl(fStringPool, nsEnabled, fGrammarAccess);
fDocumentIndex = fDeferredDocumentImpl.createDocument();
fCurrentNodeIndex = fDocumentIndex;
}
// full expansion
else {
if (documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME)) {
fDocument = fDocumentImpl = new DocumentImpl(fGrammarAccess);
fDocumentImpl.setErrorChecking(false);
}
else {
try {
Class documentClass = Class.forName(documentClassName);
fDocument = (Document)documentClass.newInstance();
}
catch (Exception e) {
// REVISIT: We've already checked the type of the factory
// in the setDocumentClassName() method. The only
// exception that can occur here is if the class
// doesn't have a zero-arg constructor. -Ac
}
}
fCurrentElementNode = fDocument;
}
} // startDocument()
/** End document. */
public void endDocument() throws Exception {}
/** Report the start of the scope of a namespace declaration. */
public void startNamespaceDeclScope(int prefix, int uri) throws Exception {}
/** Report the end of the scope of a namespace declaration. */
public void endNamespaceDeclScope(int prefix) throws Exception {}
/** Start element. */
public void startElement(int elementTypeIndex,
XMLAttrList xmlAttrList, int attrListIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
int element = fDeferredDocumentImpl.createElement(elementTypeIndex, xmlAttrList, attrListIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, element);
fCurrentNodeIndex = element;
fWithinElement = true;
// identifier registration
int index = xmlAttrList.getFirstAttr(attrListIndex);
while (index != -1) {
if (xmlAttrList.getAttType(index) == fStringPool.addSymbol("ID")) {
int nameIndex = xmlAttrList.getAttValue(index);
fDeferredDocumentImpl.putIdentifier(nameIndex, element);
}
index = xmlAttrList.getNextAttr(index);
}
// copy schema grammar, if needed
if (!fSeenRootElement) {
fSeenRootElement = true;
if (fGrammarAccess && fValidator == fSchemaValidator) {
Document schemaDocument = fSchemaValidator.getSchemaDocument();
if (schemaDocument != null) {
if (fDocumentTypeIndex == -1) {
fDocumentTypeIndex = fDeferredDocumentImpl.createDocumentType(elementTypeIndex, -1, -1);
fDeferredDocumentImpl.appendChild(0, fDocumentTypeIndex);
}
Element schema = schemaDocument.getDocumentElement();
copyInto(schema, fDocumentTypeIndex);
}
}
}
}
// full expansion
else {
boolean nsEnabled = false;
try { nsEnabled = getNamespaces(); }
catch (SAXException s) {}
String elementName = fStringPool.toString(elementTypeIndex);
AttributeList attrList = xmlAttrList.getAttributeList(attrListIndex);
Element e;
if (nsEnabled) {
e = fDocument.createElementNS(
fStringPool.toString(fStringPool.getURIForQName(elementTypeIndex)),
fStringPool.toString(elementTypeIndex)
);
} else {
e = fDocument.createElement(elementName);
}
int attrListLength = attrList.getLength();
for (int i = 0; i < attrListLength; i++) {
if (nsEnabled) {
int attName = xmlAttrList.getAttrName(i);
String attNameStr = fStringPool.toString(attName);
int nsURIIndex = fStringPool.getURIForQName(attName);
String namespaceURI = fStringPool.toString(nsURIIndex);
// DOM Level 2 wants all namespace declaration attributes
// to be bound to "http://www.w3.org/2000/xmlns/"
// So as long as the XML parser doesn't do it, it needs to
// be done here.
int prefixIndex = fStringPool.getPrefixForQName(attName);
String prefix = fStringPool.toString(prefixIndex);
if (namespaceURI == null) {
if (prefix != null) {
if (prefix.equals("xmlns")) {
namespaceURI = "http://www.w3.org/2000/xmlns/";
}
} else if (attNameStr.equals("xmlns")) {
namespaceURI = "http://www.w3.org/2000/xmlns/";
}
}
e.setAttributeNS(namespaceURI,
attNameStr,
attrList.getValue(i));
} else {
String attrName = attrList.getName(i);
String attrValue = attrList.getValue(i);
e.setAttribute(attrName, attrValue);
if (fDocumentImpl != null && !xmlAttrList.isSpecified(i)) {
((AttrImpl)e.getAttributeNode(attrName)).setSpecified(false);
}
}
}
fCurrentElementNode.appendChild(e);
fCurrentElementNode = e;
fWithinElement = true;
// identifier registration
if (fDocumentImpl != null) {
int index = xmlAttrList.getFirstAttr(attrListIndex);
while (index != -1) {
if (xmlAttrList.getAttType(index) == fStringPool.addSymbol("ID")) {
String name = fStringPool.toString(xmlAttrList.getAttValue(index));
fDocumentImpl.putIdentifier(name, e);
}
index = xmlAttrList.getNextAttr(index);
}
}
xmlAttrList.releaseAttrList(attrListIndex);
// copy schema grammar, if needed
if (!fSeenRootElement) {
fSeenRootElement = true;
if (fDocumentImpl != null && fGrammarAccess && fValidator == fSchemaValidator) {
Document schemaDocument = fSchemaValidator.getSchemaDocument();
if (schemaDocument != null) {
if (fDocumentType == null) {
String rootName = elementName;
String systemId = ""; // REVISIT: How do we get this value? -Ac
String publicId = ""; // REVISIT: How do we get this value? -Ac
fDocumentType = fDocumentImpl.createDocumentType(rootName, publicId,
systemId);
fDocument.appendChild(fDocumentType);
}
Element schema = schemaDocument.getDocumentElement();
XUtil.copyInto(schema, fDocumentType);
}
}
}
}
} // startElement(int,XMLAttrList,int)
/** End element. */
public void endElement(int elementTypeIndex)
throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
fCurrentNodeIndex = fDeferredDocumentImpl.getParentNode(fCurrentNodeIndex, false);
fWithinElement = false;
}
// full node expansion
else {
fCurrentElementNode = fCurrentElementNode.getParentNode();
fWithinElement = false;
}
} // endElement(int)
/** Characters. */
public void characters(int dataIndex)
throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int text;
if (fInCDATA) {
text = fDeferredDocumentImpl.createCDATASection(dataIndex, false);
} else {
// The Text normalization is taken care of within the Text Node
// in the DEFERRED case.
text = fDeferredDocumentImpl.createTextNode(dataIndex, false);
}
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, text);
}
// full node expansion
else {
Text text;
if (fInCDATA) {
text = fDocument.createCDATASection(fStringPool.orphanString(dataIndex));
}
else {
if (fWithinElement && fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
Node lastChild = fCurrentElementNode.getLastChild();
if (lastChild != null
&& lastChild.getNodeType() == Node.TEXT_NODE) {
// Normalization of Text Nodes - append rather than create.
((Text)lastChild).appendData(fStringPool.orphanString(dataIndex));
return;
}
}
text = fDocument.createTextNode(fStringPool.orphanString(dataIndex));
}
fCurrentElementNode.appendChild(text);
}
} // characters(int)
/** Ignorable whitespace. */
public void ignorableWhitespace(int dataIndex) throws Exception {
// ignore the whitespace
if (!fIncludeIgnorableWhitespace) {
fStringPool.orphanString(dataIndex);
return;
}
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int text;
if (fInCDATA) {
text = fDeferredDocumentImpl.createCDATASection(dataIndex, true);
} else {
// The Text normalization is taken care of within the Text Node
// in the DEFERRED case.
text = fDeferredDocumentImpl.createTextNode(dataIndex, true);
}
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, text);
}
// full node expansion
else {
Text text;
if (fInCDATA) {
text = fDocument.createCDATASection(fStringPool.orphanString(dataIndex));
}
else {
if (fWithinElement && fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
Node lastChild = fCurrentElementNode.getLastChild();
if (lastChild != null
&& lastChild.getNodeType() == Node.TEXT_NODE) {
// Normalization of Text Nodes - append rather than create.
((Text)lastChild).appendData(fStringPool.orphanString(dataIndex));
return;
}
}
text = fDocument.createTextNode(fStringPool.orphanString(dataIndex));
}
if (fDocumentImpl != null) {
((TextImpl)text).setIgnorableWhitespace(true);
}
fCurrentElementNode.appendChild(text);
}
} // ignorableWhitespace(int)
/** Processing instruction. */
public void processingInstruction(int targetIndex, int dataIndex)
throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int pi = fDeferredDocumentImpl.createProcessingInstruction(targetIndex, dataIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, pi);
}
// full node expansion
else {
String target = fStringPool.orphanString(targetIndex);
String data = fStringPool.orphanString(dataIndex);
ProcessingInstruction pi = fDocument.createProcessingInstruction(target, data);
fCurrentElementNode.appendChild(pi);
}
} // processingInstruction(int,int)
/** Comment. */
public void comment(int dataIndex) throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int comment = fDeferredDocumentImpl.createComment(dataIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, comment);
}
// full node expansion
else {
Comment comment = fDocument.createComment(fStringPool.orphanString(dataIndex));
fCurrentElementNode.appendChild(comment);
}
} // comment(int)
/** Not called. */
public void characters(char ch[], int start, int length) throws Exception {}
/** Not called. */
public void ignorableWhitespace(char ch[], int start, int length) throws Exception {}
//
// XMLDocumentScanner methods
//
/** Start CDATA section. */
public void startCDATA() throws Exception {
fInCDATA = true;
}
/** End CDATA section. */
public void endCDATA() throws Exception {
fInCDATA = false;
}
//
// XMLEntityHandler methods
//
/** Start entity reference. */
public void startEntityReference(int entityName, int entityType,
int entityContext) throws Exception {
// are we ignoring entity reference nodes?
if (!fCreateEntityReferenceNodes) {
return;
}
// ignore built-in entities
if (entityName == fAmpIndex ||
entityName == fGtIndex ||
entityName == fLtIndex ||
entityName == fAposIndex ||
entityName == fQuotIndex) {
return;
}
// we only support one context for entity references right now...
if (entityContext != XMLEntityHandler.CONTEXT_IN_CONTENT) {
return;
}
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int entityRefIndex = fDeferredDocumentImpl.createEntityReference(entityName);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, entityRefIndex);
fCurrentNodeIndex = entityRefIndex;
}
// full node expansion
else {
EntityReference er = fDocument.createEntityReference(fStringPool.toString(entityName));
fCurrentElementNode.appendChild(er);
fCurrentElementNode = er;
}
} // startEntityReference(int,int,int)
/** End entity reference. */
public void endEntityReference(int entityName, int entityType,
int entityContext) throws Exception {
// are we ignoring entity reference nodes?
if (!fCreateEntityReferenceNodes) {
return;
}
// ignore built-in entities
if (entityName == fAmpIndex ||
entityName == fGtIndex ||
entityName == fLtIndex ||
entityName == fAposIndex ||
entityName == fQuotIndex) {
return;
}
// we only support one context for entity references right now...
if (entityContext != XMLEntityHandler.CONTEXT_IN_CONTENT) {
return;
}
// deferred node expansion
if (fDeferredDocumentImpl != null) {
String name = fStringPool.toString(entityName);
int erChild = fCurrentNodeIndex;
fCurrentNodeIndex = fDeferredDocumentImpl.getParentNode(erChild, false);
// should never be true - we should not return here.
if (fDeferredDocumentImpl.getNodeType(erChild, false) != Node.ENTITY_REFERENCE_NODE) return;
erChild = fDeferredDocumentImpl.getFirstChild(erChild, false); // first Child of EntityReference
if (fDocumentTypeIndex != -1) {
// find Entity decl for this EntityReference.
int entityDecl = fDeferredDocumentImpl.getFirstChild(fDocumentTypeIndex, false);
while (entityDecl != -1) {
if (fDeferredDocumentImpl.getNodeType(entityDecl, false) == Node.ENTITY_NODE
&& fDeferredDocumentImpl.getNodeNameString(entityDecl, false).equals(name)) // string compare...
{
break;
}
entityDecl = fDeferredDocumentImpl.getNextSibling(entityDecl, false);
}
if (entityDecl != -1
&& fDeferredDocumentImpl.getFirstChild(entityDecl, false) == -1) {
// found entityDecl with same name as this reference
// AND it doesn't have any children.
// we don't need to iterate, because the whole structure
// should already be connected to the 1st child.
fDeferredDocumentImpl.setAsFirstChild(entityDecl, erChild);
}
}
}
// full node expansion
else {
Node erNode = fCurrentElementNode;//fCurrentElementNode.getParentNode();
fCurrentElementNode = erNode.getParentNode();
if (fDocumentImpl != null) {
NamedNodeMap entities = fDocumentType.getEntities();
String name = fStringPool.toString(entityName);
Node entityNode = entities.getNamedItem(name);
// We could simply return here if there is no entity for the reference.
if (entityNode == null || entityNode.hasChildNodes()) {
return;
}
Entity entity = (Entity)entityNode;
if (erNode.hasChildNodes()) {
NodeList list = erNode.getChildNodes();
int len = list.getLength();
for (int i = 0; i < len; i++) {
Node childClone = list.item(i).cloneNode(true);
entity.appendChild(childClone);
}
}
}
}
} // endEntityReference(int,int,int)
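        // Note on the method above: besides restoring the current node, the
        // first fully expanded reference to an entity also populates the
        // corresponding Entity node in the doctype, either by attaching the
        // reference's first child (deferred case) or by cloning the
        // reference's children (full case).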
//
// DTDValidator.EventHandler methods
//
/**
* This function will be called when a <!DOCTYPE...> declaration is
* encountered.
*/
public void startDTD(int rootElementType, int publicId, int systemId)
throws Exception {
// full expansion
if (fDocumentImpl != null) {
String rootElementName = fStringPool.toString(rootElementType);
String publicString = fStringPool.toString(publicId);
String systemString = fStringPool.toString(systemId);
fDocumentType = fDocumentImpl.
createDocumentType(rootElementName, publicString, systemString);
fDocumentImpl.appendChild(fDocumentType);
if (fGrammarAccess) {
Element schema = fDocument.createElement("schema");
// REVISIT: What should the namespace be? -Ac
schema.setAttribute("xmlns", "http://www.w3.org/1999/XMLSchema");
((AttrImpl)schema.getAttributeNode("xmlns")).setSpecified(false);
schema.setAttribute("finalDefault", "");
((AttrImpl)schema.getAttributeNode("finalDefault")).setSpecified(false);
schema.setAttribute("exactDefault", "");
((AttrImpl)schema.getAttributeNode("exactDefault")).setSpecified(false);
fDocumentType.appendChild(schema);
}
}
// deferred expansion
else if (fDeferredDocumentImpl != null) {
fDocumentTypeIndex =
fDeferredDocumentImpl.
createDocumentType(rootElementType, publicId, systemId);
fDeferredDocumentImpl.appendChild(fDocumentIndex, fDocumentTypeIndex);
if (fGrammarAccess) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("xmlns"),
fStringPool.addString("http://www.w3.org/1999/XMLSchema"),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("finalDefault"),
fStringPool.addString(""),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("exactDefault"),
fStringPool.addString(""),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.endAttrList();
int schemaIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("schema"), fAttrList, handle);
// REVISIT: What should the namespace be? -Ac
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, schemaIndex);
}
}
} // startDTD(int,int,int)
/**
* Supports DOM Level 2 internalSubset additions.
* Called when the internal subset is completely scanned.
*/
public void internalSubset(int internalSubset) {
//System.out.println("internalSubset callback:"+fStringPool.toString(internalSubset));
// full expansion
if (fDocumentImpl != null && fDocumentType != null) {
((DocumentTypeImpl)fDocumentType).setInternalSubset(fStringPool.toString(internalSubset));
}
// deferred expansion
else if (fDeferredDocumentImpl != null) {
fDeferredDocumentImpl.setInternalSubset(fDocumentTypeIndex, internalSubset);
}
}
/**
* This function will be called at the end of the DTD.
*/
public void endDTD() throws Exception {}
/**
* <!ELEMENT Name contentspec>
*/
public void elementDecl(int elementTypeIndex,
XMLValidator.ContentSpec contentSpec)
throws Exception {
if (DEBUG_ATTLIST_DECL) {
String contentModel = contentSpec.toString();
System.out.println("elementDecl(" + fStringPool.toString(elementTypeIndex) + ", " +
contentModel + ")");
}
//
// Create element declaration
//
if (fGrammarAccess) {
if (fDeferredDocumentImpl != null) {
//
// Build element
//
// get element declaration; create if necessary
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
int elementIndex = getFirstChildElement(schemaIndex, "element", "name", elementName);
if (elementIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(elementName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("minOccurs"), // name
fStringPool.addString("1"), // value
fStringPool.addSymbol("NMTOKEN"), // type
false, // specified
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("nullable"), // name
fStringPool.addString("false"), // value
fStringPool.addSymbol("ENUMERATION"), // type
false, // specified
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("abstract"), // name
fStringPool.addString("false"), // value
fStringPool.addSymbol("ENUMERATION"), // type
false, // specified
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("final"), // name
fStringPool.addString("false"), // value
fStringPool.addSymbol("ENUMERATION"), // type
false, // specified
false); // search
fAttrList.endAttrList();
elementIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, elementIndex);
}
//
// Build content model
//
// <!ELEMENT name (#PCDATA)>
int contentType = contentSpec.getType();
String contentTypeName = fStringPool.toString(contentType);
XMLContentSpecNode node = new XMLContentSpecNode();
int contentSpecHandle = contentSpec.getHandle();
if (contentSpecHandle != -1) {
contentSpec.getNode(contentSpecHandle, node);
}
// (#PCDATA)
if (contentTypeName.equals("MIXED") && node.type == 0) {
int attrNameIndex = fStringPool.addSymbol("type");
int attrValueIndex = fStringPool.addString("string");
boolean attrSpecified = true;
int attrIndex = fDeferredDocumentImpl.createAttribute(attrNameIndex, attrValueIndex, attrSpecified);
fDeferredDocumentImpl.setAttributeNode(elementIndex, attrIndex);
}
// other content models
else {
// get type element; create if necessary
int typeIndex = getFirstChildElement(elementIndex, "type");
if (typeIndex == -1) {
typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("type"), null, -1);
// REVISIT: Check for type redeclaration? -Ac
fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getFirstChildElement(elementIndex));
}
// <!ELEMENT name EMPTY>
if (contentTypeName.equals("EMPTY")) {
int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("empty"), true);
fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
}
// <!ELEMENT name ANY>
else if (contentTypeName.equals("ANY")) {
int anyIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("any"), null, -1);
fDeferredDocumentImpl.insertBefore(typeIndex, anyIndex, getFirstChildElement(typeIndex));
}
// <!ELEMENT name (a,b,...)> or <!ELEMENT name (a|b|...)>
else if (contentTypeName.equals("CHILDREN")) {
int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("elementOnly"), true);
fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
//attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("order"), fStringPool.addString("seq"), false);
//fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
int contentSpecIndex = contentSpec.getHandle();
contentSpec.getNode(contentSpecIndex, node);
Element model = createContentModel(contentSpec, node);
int modelIndex = createDeferredContentModel(model);
int firstChildIndex = getFirstChildElement(typeIndex);
fDeferredDocumentImpl.insertBefore(typeIndex, modelIndex, firstChildIndex);
}
// <!ELEMENT name (#PCDATA|a|...)*>
else {
// REVISIT: Any chance of getting other than MIXED? -Ac
// set content type
int attrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("mixed"), true);
fDeferredDocumentImpl.setAttributeNode(typeIndex, attrIndex);
// skip '*' node
contentSpec.getNode(node.value, node);
// add leaves (on descent)
do {
int index = node.value;
int handle = fAttrList.startAttrList();
contentSpec.getNode(node.otherValue, node);
String elementRefName = fStringPool.toString(node.value);
fAttrList.addAttr(
fStringPool.addSymbol("ref"),
fStringPool.addString(elementRefName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.endAttrList();
int elementRefIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
fDeferredDocumentImpl.insertBefore(typeIndex, elementRefIndex, getFirstChildElement(typeIndex, "element"));
contentSpec.getNode(index, node);
} while (node.type != XMLContentSpecNode.CONTENTSPECNODE_LEAF);
}
}
} // if defer-node-expansion
else if (fDocumentImpl != null) {
//
// Build element
//
// get element declaration; create if necessary
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
Element element = XUtil.getFirstChildElement(schema, "element", "name", elementName);
if (element == null) {
element = fDocument.createElement("element");
element.setAttribute("name", elementName);
element.setAttribute("minOccurs", "1");
((AttrImpl)element.getAttributeNode("minOccurs")).setSpecified(false);
element.setAttribute("nullable", "false");
((AttrImpl)element.getAttributeNode("nullable")).setSpecified(false);
element.setAttribute("abstract", "false");
((AttrImpl)element.getAttributeNode("abstract")).setSpecified(false);
element.setAttribute("final", "false");
((AttrImpl)element.getAttributeNode("final")).setSpecified(false);
schema.appendChild(element);
}
//
// Build content model
//
// <!ELEMENT name (#PCDATA)>
int contentType = contentSpec.getType();
String contentTypeName = fStringPool.toString(contentType);
XMLContentSpecNode node = new XMLContentSpecNode();
int contentSpecHandle = contentSpec.getHandle();
if (contentSpecHandle != -1) {
contentSpec.getNode(contentSpecHandle, node);
}
// (#PCDATA)
if (contentTypeName.equals("MIXED") && node.type == 0) {
element.setAttribute("type", "string");
}
// other content models
else {
// get type element; create if necessary
Element type = XUtil.getFirstChildElement(element, "type");
if (type == null) {
type = fDocumentImpl.createElement("type");
// REVISIT: Check for type redeclaration? -Ac
element.insertBefore(type, XUtil.getFirstChildElement(element));
}
// <!ELEMENT name EMPTY>
if (contentTypeName.equals("EMPTY")) {
type.setAttribute("content", "empty");
}
// <!ELEMENT name ANY>
else if (contentTypeName.equals("ANY")) {
Element any = fDocumentImpl.createElement("any");
type.insertBefore(any, XUtil.getFirstChildElement(type));
}
// <!ELEMENT name (a,b,...)> or <!ELEMENT name (a|b|...)>
else if (contentTypeName.equals("CHILDREN")) {
type.setAttribute("content", "elementOnly");
//((AttrImpl)type.getAttributeNode("content")).setSpecified(false);
//type.setAttribute("order", "seq");
//((AttrImpl)type.getAttributeNode("order")).setSpecified(false);
int contentSpecIndex = contentSpec.getHandle();
contentSpec.getNode(contentSpecIndex, node);
Element model = createContentModel(contentSpec, node);
Node fragment = fDocument.createDocumentFragment();
XUtil.copyInto(model, fragment);
Element firstChild = XUtil.getFirstChildElement(type);
type.insertBefore(fragment, firstChild);
}
// <!ELEMENT name (#PCDATA|a|...)*>
else {
// REVISIT: Any chance of getting other than MIXED? -Ac
// set content type
type.setAttribute("content", "mixed");
// skip '*' node
contentSpec.getNode(node.value, node);
// add leaves (on descent)
do {
int index = node.value;
int handle = fAttrList.startAttrList();
contentSpec.getNode(node.otherValue, node);
String elementRefName = fStringPool.toString(node.value);
Element elementRef = fDocumentImpl.createElement("element");
elementRef.setAttribute("ref", elementRefName);
type.insertBefore(elementRef, XUtil.getFirstChildElement(type, "element"));
contentSpec.getNode(index, node);
} while (node.type != XMLContentSpecNode.CONTENTSPECNODE_LEAF);
}
}
} // if NOT defer-node-expansion
} // if grammar-access
} // elementDecl(int,String)
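        // For example, with grammar-access enabled, a declaration such as
        //     <!ELEMENT title (#PCDATA)>
        // is mirrored under the <schema> child of the doctype roughly as
        //     <element name="title" type="string" minOccurs="1" nullable="false" ...>
        // while <!ELEMENT br EMPTY> instead produces a nested <type content="empty"/>
        // child. (Element names here are illustrative only; the attribute values
        // follow the defaults set in the code above.)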
/**
* <!ATTLIST Name AttDef>
*/
public void attlistDecl(int elementTypeIndex,
int attrNameIndex, int attType,
String enumString,
int attDefaultType, int attDefaultValue)
throws Exception {
if (DEBUG_ATTLIST_DECL) {
System.out.println("attlistDecl(" + fStringPool.toString(elementTypeIndex) + ", " +
fStringPool.toString(attrNameIndex) + ", " +
fStringPool.toString(attType) + ", " +
enumString + ", " +
fStringPool.toString(attDefaultType) + ", " +
fStringPool.toString(attDefaultValue) + ")");
}
// deferred expansion
if (fDeferredDocumentImpl != null) {
// get the default value
if (attDefaultValue != -1) {
if (DEBUG_ATTLIST_DECL) {
System.out.println(" adding default attribute value: "+
fStringPool.toString(attDefaultValue));
}
// get element definition
int elementDefIndex = fDeferredDocumentImpl.lookupElementDefinition(elementTypeIndex);
// create element definition if not already there
if (elementDefIndex == -1) {
elementDefIndex = fDeferredDocumentImpl.createElementDefinition(elementTypeIndex);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, elementDefIndex);
}
// add default attribute
int attrIndex = fDeferredDocumentImpl.createAttribute(attrNameIndex, attDefaultValue, false);
fDeferredDocumentImpl.appendChild(elementDefIndex, attrIndex);
}
//
// Create attribute declaration
//
if (fGrammarAccess) {
// get element declaration; create it if necessary
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
int elementIndex = getFirstChildElement(schemaIndex, "element", "name", elementName);
if (elementIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(elementName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); //search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
fAttrList.endAttrList();
elementIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, elementIndex);
}
// get type element; create it if necessary
int typeIndex = getFirstChildElement(elementIndex, "type");
if (typeIndex == -1) {
typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("type"), null, -1);
fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getFirstChildElement(elementIndex));
}
// create attribute and set its attributes
String attributeName = fStringPool.toString(attrNameIndex);
int attributeIndex = getFirstChildElement(elementIndex, "attribute", "name", attributeName);
if (attributeIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(attributeName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
/***
fAttrList.addAttr(
fStringPool.addSymbol("type"),
fStringPool.addString("string"),
fStringPool.addSymbol("CDATA"),
true,
false); // search
/***/
fAttrList.addAttr(
fStringPool.addSymbol("minOccurs"),
fStringPool.addString("0"),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("maxOccurs"),
fStringPool.addString("1"),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.endAttrList();
attributeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("attribute"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(typeIndex, attributeIndex);
// attribute type: CDATA, ENTITY, ... , NMTOKENS; ENUMERATION
String attributeTypeName = fStringPool.toString(attType);
if (attributeTypeName.equals("CDATA")) {
int typeAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("type"), fStringPool.addString("string"), false);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, typeAttrIndex);
}
else if (attributeTypeName.equals("ENUMERATION")) {
handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("source"),
fStringPool.addString("NMTOKEN"),
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
int datatypeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("datatype"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(attributeIndex, datatypeIndex);
String tokenizerString = enumString.substring(1, enumString.length() - 1);
StringTokenizer tokenizer = new StringTokenizer(tokenizerString, "|");
while (tokenizer.hasMoreTokens()) {
handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("value"),
fStringPool.addString(tokenizer.nextToken()),
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
int enumerationIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("enumeration"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(datatypeIndex, enumerationIndex);
}
}
else {
// REVISIT: Could we ever get an unknown data type? -Ac
int typeAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("type"), fStringPool.addString(attributeTypeName), true);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, typeAttrIndex);
}
// attribute default type: #IMPLIED, #REQUIRED, #FIXED
boolean fixed = false;
if (attDefaultType != -1) {
String attributeDefaultTypeName = fStringPool.toString(attDefaultType);
if (attributeDefaultTypeName.equals("#REQUIRED")) {
int minOccursAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("minOccurs"), fStringPool.addString("1"), true);
int oldMinOccursAttrIndex = fDeferredDocumentImpl.setAttributeNode(attributeIndex, minOccursAttrIndex);
fStringPool.releaseString(fDeferredDocumentImpl.getNodeValue(oldMinOccursAttrIndex, false));
}
else if (attributeDefaultTypeName.equals("#FIXED")) {
fixed = true;
int fixedAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("fixed"), attDefaultValue, true);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, fixedAttrIndex);
}
}
// attribute default value
if (!fixed && attDefaultValue != -1) {
int defaultAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("default"), attDefaultValue, true);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, defaultAttrIndex);
}
}
}
}
// full expansion
else if (fDocumentImpl != null) {
// get the default value
if (attDefaultValue != -1) {
if (DEBUG_ATTLIST_DECL) {
System.out.println(" adding default attribute value: "+
fStringPool.toString(attDefaultValue));
}
// get element name
String elementName = fStringPool.toString(elementTypeIndex);
// get element definition node
NamedNodeMap elements = ((DocumentTypeImpl)fDocumentType).getElements();
ElementDefinitionImpl elementDef = (ElementDefinitionImpl)elements.getNamedItem(elementName);
if (elementDef == null) {
elementDef = fDocumentImpl.createElementDefinition(elementName);
((DocumentTypeImpl)fDocumentType).getElements().setNamedItem(elementDef);
}
// REVISIT: Check for uniqueness of element name? -Ac
// get attribute name and value index
String attrName = fStringPool.toString(attrNameIndex);
String attrValue = fStringPool.toString(attDefaultValue);
// create attribute and set properties
AttrImpl attr = (AttrImpl)fDocumentImpl.createAttribute(attrName);
attr.setValue(attrValue);
attr.setSpecified(false);
// add default attribute to element definition
elementDef.getAttributes().setNamedItem(attr);
}
//
// Create attribute declaration
//
if (fGrammarAccess) {
// get element declaration; create it if necessary
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
Element element = XUtil.getFirstChildElement(schema, "element", "name", elementName);
if (element == null) {
element = fDocument.createElement("element");
element.setAttribute("name", elementName);
//element.setAttribute("export", "true");
//((AttrImpl)element.getAttributeNode("export")).setSpecified(false);
schema.appendChild(element);
}
// get type element; create it if necessary
Element type = XUtil.getFirstChildElement(element, "type");
if (type == null) {
type = fDocument.createElement("type");
// REVISIT: Check for type redeclaration? -Ac
element.insertBefore(type, XUtil.getFirstChildElement(element));
}
// create attribute and set its attributes
String attributeName = fStringPool.toString(attrNameIndex);
Element attribute = XUtil.getFirstChildElement(element, "attribute", "name", attributeName);
if (attribute == null) {
attribute = fDocument.createElement("attribute");
attribute.setAttribute("name", attributeName);
attribute.setAttribute("minOccurs", "0");
((AttrImpl)attribute.getAttributeNode("minOccurs")).setSpecified(false);
attribute.setAttribute("maxOccurs", "1");
((AttrImpl)attribute.getAttributeNode("maxOccurs")).setSpecified(false);
type.appendChild(attribute);
// attribute type: CDATA, ENTITY, ... , NMTOKENS; ENUMERATION
String attributeTypeName = fStringPool.toString(attType);
if (attributeTypeName.equals("CDATA")) {
attribute.setAttribute("type", "string");
((AttrImpl)attribute.getAttributeNode("type")).setSpecified(false);
}
else if (attributeTypeName.equals("ENUMERATION")) {
Element datatype = fDocumentImpl.createElement("datatype");
datatype.setAttribute("source", "NMTOKEN");
attribute.appendChild(datatype);
String tokenizerString = enumString.substring(1, enumString.length() - 1);
StringTokenizer tokenizer = new StringTokenizer(tokenizerString, "|");
while (tokenizer.hasMoreTokens()) {
Element enumeration = fDocument.createElement("enumeration");
enumeration.setAttribute("value", tokenizer.nextToken());
datatype.appendChild(enumeration);
}
}
else {
// REVISIT: Could we ever get an unknown data type? -Ac
attribute.setAttribute("type", attributeTypeName);
}
// attribute default type: #IMPLIED, #REQUIRED, #FIXED
boolean fixed = false;
if (attDefaultType != -1) {
String attributeDefaultTypeName = fStringPool.toString(attDefaultType);
if (attributeDefaultTypeName.equals("#REQUIRED")) {
attribute.setAttribute("minOccurs", "1");
((AttrImpl)attribute.getAttributeNode("minOccurs")).setSpecified(true);
}
else if (attributeDefaultTypeName.equals("#FIXED")) {
fixed = true;
String fixedValue = fStringPool.toString(attDefaultValue);
attribute.setAttribute("fixed", fixedValue);
}
}
// attribute default value
if (!fixed && attDefaultValue != -1) {
String defaultValue = fStringPool.toString(attDefaultValue);
attribute.setAttribute("default", defaultValue);
}
}
}
} // if NOT defer-node-expansion
} // attlistDecl(int,int,int,String,int,int)
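        // For example, with grammar-access enabled, a declaration such as
        //     <!ATTLIST chapter status (draft|final) "draft">
        // is recorded under the element's <type> child roughly as
        //     <attribute name="status" default="draft">
        //         <datatype source="NMTOKEN">
        //             <enumeration value="draft"/>
        //             <enumeration value="final"/>
        //         </datatype>
        //     </attribute>
        // (Names here are illustrative only; CDATA attributes get type="string"
        // instead of a datatype child, and #REQUIRED/#FIXED adjust the
        // minOccurs/fixed attributes as coded above.)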
/**
* <!ENTITY % Name EntityValue> (internal)
*/
public void internalPEDecl(int entityName, int entityValue) throws Exception {}
/**
* <!ENTITY % Name ExternalID> (external)
*/
public void externalPEDecl(int entityName, int publicId, int systemId) throws Exception {}
/**
* <!ENTITY Name EntityValue> (internal)
*/
public void internalEntityDecl(int entityNameIndex, int entityValueIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
if (fDocumentTypeIndex == -1) return; //revisit: should never happen. Exception?
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, -1, -1, -1);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create internal entity declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String entityName = fStringPool.toString(entityNameIndex);
int textEntityIndex = getFirstChildElement(schemaIndex, "textEntity", "name", entityName);
if (textEntityIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(entityName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
fAttrList.endAttrList();
textEntityIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("textEntity"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, textEntityIndex);
int textIndex = fDeferredDocumentImpl.createTextNode(entityValueIndex, false);
fDeferredDocumentImpl.appendChild(textEntityIndex, textIndex);
}
}
/***/
}
// full expansion
else if (fDocumentImpl != null) {
if (fDocumentType == null) return; //revisit: should never happen. Exception?
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String entityName = fStringPool.toString(entityNameIndex);
Entity entity = fDocumentImpl.createEntity(entityName);
fDocumentType.getEntities().setNamedItem(entity);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create internal entity declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element textEntity = XUtil.getFirstChildElement(schema, "textEntity", "name", entityName);
if (textEntity == null) {
textEntity = fDocument.createElement("textEntity");
textEntity.setAttribute("name", entityName);
textEntity.setAttribute("export", "true");
((AttrImpl)textEntity.getAttributeNode("export")).setSpecified(false);
String entityValue = fStringPool.toString(entityValueIndex);
Text value = fDocument.createTextNode(entityValue);
textEntity.appendChild(value);
schema.appendChild(textEntity);
}
}
/***/
}
} // internalEntityDecl(int,int)
/**
* <!ENTITY Name ExternalID> (external)
*/
public void externalEntityDecl(int entityNameIndex, int publicIdIndex, int systemIdIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, publicIdIndex, systemIdIndex, -1);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create external entity declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String entityName = fStringPool.toString(entityNameIndex);
int externalEntityIndex = getFirstChildElement(schemaIndex, "externalEntity", "name", entityName);
if (externalEntityIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(entityName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
if (publicIdIndex != -1) {
fAttrList.addAttr(
fStringPool.addSymbol("public"),
publicIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
}
fAttrList.addAttr(
fStringPool.addSymbol("system"),
systemIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
externalEntityIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("externalEntity"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, externalEntityIndex);
}
}
/***/
}
// full expansion
else if (fDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String entityName = fStringPool.toString(entityNameIndex);
String publicId = fStringPool.toString(publicIdIndex);
String systemId = fStringPool.toString(systemIdIndex);
EntityImpl entity = (EntityImpl)fDocumentImpl.createEntity(entityName);
if (publicIdIndex != -1) {
entity.setPublicId(publicId);
}
entity.setSystemId(systemId);
fDocumentType.getEntities().setNamedItem(entity);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create external entity declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element externalEntity = XUtil.getFirstChildElement(schema, "externalEntity", "name", entityName);
if (externalEntity == null) {
externalEntity = fDocument.createElement("externalEntity");
externalEntity.setAttribute("name", entityName);
externalEntity.setAttribute("export", "true");
((AttrImpl)externalEntity.getAttributeNode("export")).setSpecified(false);
if (publicIdIndex != -1) {
externalEntity.setAttribute("public", publicId);
}
externalEntity.setAttribute("system", systemId);
schema.appendChild(externalEntity);
}
}
/***/
}
} // externalEntityDecl(int,int,int)
/**
* <!ENTITY Name ExternalID NDataDecl> (unparsed)
*/
public void unparsedEntityDecl(int entityNameIndex,
int publicIdIndex, int systemIdIndex,
int notationNameIndex) throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, publicIdIndex, systemIdIndex, notationNameIndex);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// add unparsed entity declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String entityName = fStringPool.toString(entityNameIndex);
int unparsedEntityIndex = getFirstChildElement(schemaIndex, "unparsedEntity", "name", entityName);
if (unparsedEntityIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(entityName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
if (publicIdIndex != -1) {
fAttrList.addAttr(
fStringPool.addSymbol("public"),
publicIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
}
fAttrList.addAttr(
fStringPool.addSymbol("system"),
systemIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("notation"),
fStringPool.addString(fStringPool.toString(notationNameIndex)),
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
unparsedEntityIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("unparsedEntity"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, unparsedEntityIndex);
}
}
/***/
}
// full expansion
else if (fDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String entityName = fStringPool.toString(entityNameIndex);
String publicId = fStringPool.toString(publicIdIndex);
String systemId = fStringPool.toString(systemIdIndex);
String notationName = fStringPool.toString(notationNameIndex);
EntityImpl entity = (EntityImpl)fDocumentImpl.createEntity(entityName);
if (publicIdIndex != -1) {
entity.setPublicId(publicId);
}
entity.setSystemId(systemId);
entity.setNotationName(notationName);
fDocumentType.getEntities().setNamedItem(entity);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// add unparsed entity declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element unparsedEntity = XUtil.getFirstChildElement(schema, "unparsedEntity", "name", entityName);
if (unparsedEntity == null) {
unparsedEntity = fDocument.createElement("unparsedEntity");
unparsedEntity.setAttribute("name", entityName);
unparsedEntity.setAttribute("export", "true");
((AttrImpl)unparsedEntity.getAttributeNode("export")).setSpecified(false);
if (publicIdIndex != -1) {
unparsedEntity.setAttribute("public", publicId);
}
unparsedEntity.setAttribute("system", systemId);
unparsedEntity.setAttribute("notation", notationName);
schema.appendChild(unparsedEntity);
}
}
/***/
}
} // unparsedEntityDecl(int,int,int,int)
/**
* <!NOTATION Name ExternalId>
*/
public void notationDecl(int notationNameIndex, int publicIdIndex, int systemIdIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newNotationIndex = fDeferredDocumentImpl.createNotation(notationNameIndex, publicIdIndex, systemIdIndex);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newNotationIndex);
// create notation declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String notationName = fStringPool.toString(notationNameIndex);
int notationIndex = getFirstChildElement(schemaIndex, "notation", "name", notationName);
if (notationIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(notationName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
/***
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
/***/
if (publicIdIndex == -1) {
publicIdIndex = 0; // empty string in string pool
}
fAttrList.addAttr(
fStringPool.addSymbol("public"),
publicIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
if (systemIdIndex != -1) {
fAttrList.addAttr(
fStringPool.addSymbol("system"),
systemIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
}
fAttrList.endAttrList();
notationIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("notation"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, notationIndex);
}
}
}
// full expansion
else if (fDocumentImpl != null) {
// REVISIT: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String notationName = fStringPool.toString(notationNameIndex);
String publicId = fStringPool.toString(publicIdIndex);
String systemId = fStringPool.toString(systemIdIndex);
NotationImpl notationImpl = (NotationImpl)fDocumentImpl.createNotation(notationName);
notationImpl.setPublicId(publicId);
if (systemIdIndex != -1) {
notationImpl.setSystemId(systemId);
}
fDocumentType.getNotations().setNamedItem(notationImpl);
// create notation declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element notation = XUtil.getFirstChildElement(schema, "notation", "name", notationName);
if (notation == null) {
notation = fDocument.createElement("notation");
notation.setAttribute("name", notationName);
//notation.setAttribute("export", "true");
//((AttrImpl)notation.getAttributeNode("export")).setSpecified(false);
if (publicId == null) {
publicId = "";
}
notation.setAttribute("public", publicId);
if (systemIdIndex != -1) {
notation.setAttribute("system", systemId);
}
schema.appendChild(notation);
}
}
}
} // notationDecl(int,int,int)
//
// Private methods
//
/**
* Creates a content model from the specified content spec node.
* This method will always return a <em>group</em> element as the
* containing element, even when the content model contains a
* single element reference.
*/
private Element createContentModel(XMLValidator.ContentSpec contentSpec,
XMLContentSpecNode node) {
Element model = createContentModel(contentSpec, node,
new DocumentImpl(), null);
return model;
} // createContentModel(XMLContentSpecNode):Element
/**
* This is the real <em>createContentModel</em> method. This is a
* recursive solution.
*/
private Element createContentModel(XMLValidator.ContentSpec contentSpec,
XMLContentSpecNode node,
Document factory,
Element parent) {
// figure out occurrence count
int minOccur = 1;
int maxOccur = 1;
switch (node.type) {
case XMLContentSpecNode.CONTENTSPECNODE_ONE_OR_MORE: {
minOccur = 1;
maxOccur = -1;
contentSpec.getNode(node.value, node);
break;
}
case XMLContentSpecNode.CONTENTSPECNODE_ZERO_OR_MORE: {
minOccur = 0;
maxOccur = -1;
contentSpec.getNode(node.value, node);
break;
}
case XMLContentSpecNode.CONTENTSPECNODE_ZERO_OR_ONE: {
minOccur = 0;
maxOccur = 1;
contentSpec.getNode(node.value, node);
break;
}
}
// flatten model
int nodeType = node.type;
switch (nodeType) {
// CHOICE or SEQUENCE
case XMLContentSpecNode.CONTENTSPECNODE_CHOICE:
case XMLContentSpecNode.CONTENTSPECNODE_SEQ: {
// go down left side
int leftIndex = node.value;
int rightIndex = node.otherValue;
contentSpec.getNode(leftIndex, node);
Element left = createContentModel(contentSpec, node,
factory, parent);
// go down right side
contentSpec.getNode(rightIndex, node);
Element right = createContentModel(contentSpec, node,
factory, null);
// append left children
String type = nodeType == XMLContentSpecNode.CONTENTSPECNODE_CHOICE
? "choice"
: "seq";
Element model = left;
if (!left.getAttribute("order").equals(type)) {
String minOccurs = left.getAttribute("minOccurs");
String maxOccurs = left.getAttribute("maxOccurs");
if (parent == null ||
((minOccurs.equals("1") || minOccurs.length() == 0) &&
(maxOccurs.equals("1") || maxOccurs.length() == 0))) {
model = factory.createElement("group");
model.setAttribute("order", type);
if (type.equals("seq")) {
((AttrImpl)model.getAttributeNode("order")).setSpecified(false);
}
model.appendChild(left);
}
else {
model = parent;
}
}
// set occurrence count
setOccurrenceCount(model, minOccur, maxOccur);
// append right children
model.appendChild(right);
// return model
return model;
}
// LEAF
case XMLContentSpecNode.CONTENTSPECNODE_LEAF: {
String name = fStringPool.toString(node.value);
Element leaf = factory.createElement("element");
leaf.setAttribute("ref", name);
// set occurrence count and return
setOccurrenceCount(leaf, minOccur, maxOccur);
return leaf;
}
} // switch node type
// error
return null;
} // createContentModel(XMLContentSpecNode,Element):Element
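    // For example, the content model of
    //     <!ELEMENT section (head, (p | list)*)>
    // is rendered roughly as nested group elements:
    //     <group order="seq">
    //         <element ref="head"/>
    //         <group order="choice" minOccurs="0" maxOccurs="*">
    //             <element ref="p"/>
    //             <element ref="list"/>
    //         </group>
    //     </group>
    // (Element names are illustrative only; "seq" order attributes and
    // minOccurs="1" values are created but marked unspecified.)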
/**
* Sets the appropriate occurrence count attributes on the specified
* model element.
*/
private void setOccurrenceCount(Element model, int minOccur, int maxOccur) {
// min
model.setAttribute("minOccurs", Integer.toString(minOccur));
if (minOccur == 1) {
((AttrImpl)model.getAttributeNode("minOccurs")).setSpecified(false);
}
// max
if (maxOccur == -1) {
model.setAttribute("maxOccurs", "*");
}
else if (maxOccur != 1) {
model.setAttribute("maxOccurs", Integer.toString(maxOccur));
}
} // setOccurrenceCount(Element,int,int)
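    // Occurrence mapping applied above (DTD occurrence operator to attributes
    // on the content model element):
    //     (none)  minOccurs="1" (unspecified), no maxOccurs
    //     ?       minOccurs="0",               no maxOccurs
    //     *       minOccurs="0",               maxOccurs="*"
    //     +       minOccurs="1" (unspecified), maxOccurs="*"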
/** Returns the first child element of the specified node. */
private int getFirstChildElement(int nodeIndex) {
int childIndex = fDeferredDocumentImpl.getFirstChild(nodeIndex, false);
while (childIndex != -1) {
if (fDeferredDocumentImpl.getNodeType(childIndex, false) == Node.ELEMENT_NODE) {
return childIndex;
}
childIndex = fDeferredDocumentImpl.getNextSibling(childIndex, false);
}
return -1;
}
/** Returns the next sibling element of the specified node. */
private int getNextSiblingElement(int nodeIndex) {
int siblingIndex = fDeferredDocumentImpl.getNextSibling(nodeIndex, false);
while (siblingIndex != -1) {
if (fDeferredDocumentImpl.getNodeType(siblingIndex, false) == Node.ELEMENT_NODE) {
return siblingIndex;
}
siblingIndex = fDeferredDocumentImpl.getNextSibling(siblingIndex, false);
}
return -1;
}
/** Returns the first child element with the given name. */
private int getFirstChildElement(int nodeIndex, String elementName) {
int childIndex = getFirstChildElement(nodeIndex);
if (childIndex != -1) {
while (childIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(childIndex, false);
if (nodeName.equals(elementName)) {
return childIndex;
}
childIndex = getNextSiblingElement(childIndex);
}
}
return -1;
}
/** Returns the next sibling element with the given name. */
private int getNextSiblingElement(int nodeIndex, String elementName) {
int siblingIndex = getNextSiblingElement(nodeIndex);
if (siblingIndex != -1) {
while (siblingIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(siblingIndex, false);
if (nodeName.equals(elementName)) {
return siblingIndex;
}
siblingIndex = getNextSiblingElement(siblingIndex);
}
}
return -1;
}
/** Returns the first child element with the given name and attribute. */
private int getFirstChildElement(int nodeIndex, String elemName, String attrName, String attrValue) {
int childIndex = getFirstChildElement(nodeIndex, elemName);
if (childIndex != -1) {
while (childIndex != -1) {
int attrIndex = fDeferredDocumentImpl.getNodeValue(childIndex, false);
while (attrIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(attrIndex, false);
if (nodeName.equals(attrName)) {
// REVISIT: Do we need to normalize the text? -Ac
int textIndex = fDeferredDocumentImpl.getFirstChild(attrIndex, false);
String nodeValue = fDeferredDocumentImpl.getNodeValueString(textIndex, false);
if (nodeValue.equals(attrValue)) {
return childIndex;
}
}
attrIndex = fDeferredDocumentImpl.getNextSibling(attrIndex, false);
}
childIndex = getNextSiblingElement(childIndex, elemName);
}
}
return -1;
}
/** Returns the next sibling element with the given name and attribute. */
private int getNextSiblingElement(int nodeIndex, String elemName, String attrName, String attrValue) {
    int siblingIndex = getNextSiblingElement(nodeIndex, elemName);
    while (siblingIndex != -1) {
        int attrIndex = fDeferredDocumentImpl.getNodeValue(siblingIndex, false);
        while (attrIndex != -1) {
            // match on both the attribute name and its value
            String nodeName = fDeferredDocumentImpl.getNodeNameString(attrIndex, false);
            if (nodeName.equals(attrName)) {
                // REVISIT: Do we need to normalize the text? -Ac
                int textIndex = fDeferredDocumentImpl.getFirstChild(attrIndex, false);
                String nodeValue = fDeferredDocumentImpl.getNodeValueString(textIndex, false);
                if (nodeValue.equals(attrValue)) {
                    return siblingIndex;
                }
            }
            attrIndex = fDeferredDocumentImpl.getNextSibling(attrIndex, false);
        }
        siblingIndex = getNextSiblingElement(siblingIndex, elemName);
    }
    return -1;
}
/**
* Copies the source tree into the specified place in a destination
* tree. The source node and its children are appended as children
* of the destination node.
* <p>
* <em>Note:</em> This is an iterative implementation.
*/
private void copyInto(Node src, int destIndex) throws Exception {
// for ignorable whitespace features
boolean domimpl = src != null && src instanceof DocumentImpl;
// placement variables
Node start = src;
Node parent = src;
Node place = src;
// traverse source tree
while (place != null) {
// copy this node
int nodeIndex = -1;
short type = place.getNodeType();
switch (type) {
case Node.CDATA_SECTION_NODE: {
boolean ignorable = domimpl && ((TextImpl)place).isIgnorableWhitespace();
nodeIndex = fDeferredDocumentImpl.createCDATASection(fStringPool.addString(place.getNodeValue()), ignorable);
break;
}
case Node.COMMENT_NODE: {
nodeIndex = fDeferredDocumentImpl.createComment(fStringPool.addString(place.getNodeValue()));
break;
}
case Node.ELEMENT_NODE: {
XMLAttrList attrList = null;
int handle = -1;
NamedNodeMap attrs = place.getAttributes();
if (attrs != null) {
int length = attrs.getLength();
if (length > 0) {
handle = fAttrList.startAttrList();
for (int i = 0; i < length; i++) {
Attr attr = (Attr)attrs.item(i);
String attrName = attr.getNodeName();
String attrValue = attr.getNodeValue();
fAttrList.addAttr(
fStringPool.addSymbol(attrName),
fStringPool.addString(attrValue),
fStringPool.addSymbol("CDATA"), // REVISIT
attr.getSpecified(),
false); // search
}
fAttrList.endAttrList();
attrList = fAttrList;
}
}
nodeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol(place.getNodeName()), attrList, handle);
break;
}
case Node.ENTITY_REFERENCE_NODE: {
nodeIndex = fDeferredDocumentImpl.createEntityReference(fStringPool.addSymbol(place.getNodeName()));
break;
}
case Node.PROCESSING_INSTRUCTION_NODE: {
nodeIndex = fDeferredDocumentImpl.createProcessingInstruction(fStringPool.addSymbol(place.getNodeName()), fStringPool.addString(place.getNodeValue()));
break;
}
case Node.TEXT_NODE: {
boolean ignorable = domimpl && ((TextImpl)place).isIgnorableWhitespace();
nodeIndex = fDeferredDocumentImpl.createTextNode(fStringPool.addString(place.getNodeValue()), ignorable);
break;
}
default: {
throw new IllegalArgumentException("PAR010 Can't copy node type, " +
                                   type + " (" + place.getNodeName() + ')');
}
}
fDeferredDocumentImpl.appendChild(destIndex, nodeIndex);
// iterate over children
if (place.hasChildNodes()) {
parent = place;
place = place.getFirstChild();
destIndex = nodeIndex;
}
// advance
else {
place = place.getNextSibling();
while (place == null && parent != start) {
place = parent.getNextSibling();
parent = parent.getParentNode();
destIndex = fDeferredDocumentImpl.getParentNode(destIndex, false);
}
}
}
} // copyInto(Node,int)
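    // Note on the traversal above: the source tree is walked depth-first
    // without recursion; once a subtree is exhausted, the loop climbs back up
    // through getNextSibling()/getParentNode() on the source while mirroring
    // the climb in the deferred document via getParentNode(destIndex, false).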
/** Creates the content model elements for the deferred DOM tree. */
private int createDeferredContentModel(Node model) throws Exception {
int nodeType = model.getNodeType();
switch (nodeType) {
case Node.ELEMENT_NODE: {
NamedNodeMap attrs = model.getAttributes();
int handle = fAttrList.startAttrList();
int length = attrs.getLength();
for (int i = 0; i < length; i++) {
Attr attr = (Attr)attrs.item(i);
String attrName = attr.getNodeName();
String attrValue = attr.getNodeValue();
fAttrList.addAttr(
fStringPool.addSymbol(attrName),
fStringPool.addString(attrValue),
fStringPool.addSymbol((String)TYPES.get(attrName)),
attr.getSpecified(),
false); // search
}
fAttrList.endAttrList();
int modelIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol(model.getNodeName()), fAttrList, handle);
Node child = model.getFirstChild();
while (child != null) {
int childIndex = createDeferredContentModel(child);
fDeferredDocumentImpl.appendChild(modelIndex, childIndex);
child = child.getNextSibling();
}
return modelIndex;
}
case Node.TEXT_NODE: {
return fDeferredDocumentImpl.createTextNode(fStringPool.addString(model.getNodeValue()), false);
}
}
return -1;
} // createDeferredContentModel(Node):int
} // class DOMParser
|
src/org/apache/xerces/parsers/DOMParser.java
|
/*
* The Apache Software License, Version 1.1
*
*
* Copyright (c) 1999 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution,
* if any, must include the following acknowledgment:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowledgment may appear in the software itself,
* if and wherever such third-party acknowledgments normally appear.
*
* 4. The names "Xerces" and "Apache Software Foundation" must
* not be used to endorse or promote products derived from this
* software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache",
* nor may "Apache" appear in their name, without prior written
* permission of the Apache Software Foundation.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation and was
* originally based on software copyright (c) 1999, International
* Business Machines, Inc., http://www.apache.org. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.xerces.parsers;
import java.io.IOException;
import java.util.Hashtable;
import java.util.StringTokenizer;
import org.apache.xerces.framework.XMLAttrList;
import org.apache.xerces.framework.XMLContentSpecNode;
import org.apache.xerces.framework.XMLParser;
import org.apache.xerces.framework.XMLValidator;
import org.apache.xerces.readers.XMLEntityHandler;
import org.apache.xerces.utils.StringPool;
import org.apache.xerces.validators.schema.XUtil;
import org.apache.xerces.dom.DeferredDocumentImpl;
import org.apache.xerces.dom.DocumentImpl;
import org.apache.xerces.dom.DocumentTypeImpl;
import org.apache.xerces.dom.NodeImpl;
import org.apache.xerces.dom.EntityImpl;
import org.apache.xerces.dom.NotationImpl;
import org.apache.xerces.dom.ElementDefinitionImpl;
import org.apache.xerces.dom.AttrImpl;
import org.apache.xerces.dom.TextImpl;
import org.apache.xerces.dom.ElementImpl;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.Entity;
import org.w3c.dom.EntityReference;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
// REVISIT: Change use of AttributeList to Attributes. -Ac
import org.xml.sax.AttributeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
import org.xml.sax.SAXParseException;
/**
* DOMParser provides a parser which produces a W3C DOM tree as its output
*
* @version
*/
public class DOMParser
extends XMLParser
{
//
// Constants
//
// public
/** Default programmatic document class name (org.apache.xerces.dom.DocumentImpl). */
public static final String DEFAULT_DOCUMENT_CLASS_NAME = "org.apache.xerces.dom.DocumentImpl";
// debugging
/** Set to true to debug attribute list declaration calls. */
private static final boolean DEBUG_ATTLIST_DECL = false;
// features and properties
/** Features recognized by this parser. */
private static final String RECOGNIZED_FEATURES[] = {
// SAX2 core features
// Xerces features
"http://apache.org/xml/features/dom/defer-node-expansion",
"http://apache.org/xml/features/dom/create-entity-ref-nodes",
"http://apache.org/xml/features/dom/include-ignorable-whitespace",
// Experimental features
"http://apache.org/xml/features/domx/grammar-access",
};
/** Properties recognized by this parser. */
private static final String RECOGNIZED_PROPERTIES[] = {
// SAX2 core properties
// Xerces properties
"http://apache.org/xml/properties/dom/document-class-name",
"http://apache.org/xml/properties/dom/current-element-node",
};
/** For experimental grammar access. */
private static final Hashtable TYPES = new Hashtable();
//
// Data
//
// common data
protected Document fDocument;
// deferred expansion data
protected DeferredDocumentImpl fDeferredDocumentImpl;
protected int fDocumentIndex;
protected int fDocumentTypeIndex;
protected int fCurrentNodeIndex;
// full expansion data
protected DocumentImpl fDocumentImpl;
protected DocumentType fDocumentType;
protected Node fCurrentElementNode;
// state
protected boolean fWithinElement;
protected boolean fInCDATA;
// features
private boolean fGrammarAccess;
// properties
// REVISIT: Even though these have setters and getters, should they
// be protected visibility? -Ac
private String fDocumentClassName;
private boolean fDeferNodeExpansion;
private boolean fCreateEntityReferenceNodes;
private boolean fIncludeIgnorableWhitespace;
// built-in entities
protected int fAmpIndex;
protected int fLtIndex;
protected int fGtIndex;
protected int fAposIndex;
protected int fQuotIndex;
private boolean fSeenRootElement;
private XMLAttrList fAttrList;
//
// Static initializer
//
static {
String types[][] = {
{ "CDATA", "minOccurs", "maxOccurs" },
{ "ENUMERATION", "collection", "order", "export" },
{ "NMTOKEN", "name", "ref" },
};
for (int i = 0; i < types.length; i++) {
String typeName = types[i][0];
for (int j = 1; j < types[i].length; j++) {
TYPES.put(types[i][j], typeName);
}
}
}
//
// Constructors
//
/** Default constructor. */
public DOMParser() {
// setup parser state
init();
// set default values
try {
setDocumentClassName(DEFAULT_DOCUMENT_CLASS_NAME);
setCreateEntityReferenceNodes(true);
setDeferNodeExpansion(true);
setIncludeIgnorableWhitespace(true);
} catch (SAXException e) {
throw new RuntimeException("PAR001 Fatal error constructing DOMParser.");
}
} // <init>()
//
// Public methods
//
// document
/** Returns the document. */
public Document getDocument() {
if (fDocumentImpl != null) {
fDocumentImpl.setErrorChecking(true);
}
return fDocument;
}
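    // A minimal usage sketch (assuming the parse(InputSource) method inherited
    // from the base XMLParser; the file name below is only a placeholder):
    //
    //     DOMParser parser = new DOMParser();
    //     parser.parse(new InputSource("file:///path/to/document.xml"));
    //     Document doc = parser.getDocument();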
// features and properties
/**
* Returns a list of features that this parser recognizes.
* This method will never return null; if no features are
* recognized, this method will return a zero length array.
*
* @see #isFeatureRecognized
* @see #setFeature
* @see #getFeature
*/
public String[] getFeaturesRecognized() {
// get features that super/this recognizes
String superRecognized[] = super.getFeaturesRecognized();
String thisRecognized[] = RECOGNIZED_FEATURES;
// is one or the other the empty set?
int thisLength = thisRecognized.length;
if (thisLength == 0) {
return superRecognized;
}
int superLength = superRecognized.length;
if (superLength == 0) {
return thisRecognized;
}
// combine the two lists and return
String recognized[] = new String[superLength + thisLength];
System.arraycopy(superRecognized, 0, recognized, 0, superLength);
System.arraycopy(thisRecognized, 0, recognized, superLength, thisLength);
return recognized;
} // getFeaturesRecognized():String[]
/**
* Returns a list of properties that this parser recognizes.
* This method will never return null; if no properties are
* recognized, this method will return a zero length array.
*
* @see #isPropertyRecognized
* @see #setProperty
* @see #getProperty
*/
public String[] getPropertiesRecognized() {
// get properties that super/this recognizes
String superRecognized[] = super.getPropertiesRecognized();
String thisRecognized[] = RECOGNIZED_PROPERTIES;
// is one or the other the empty set?
int thisLength = thisRecognized.length;
if (thisLength == 0) {
return superRecognized;
}
int superLength = superRecognized.length;
if (superLength == 0) {
return thisRecognized;
}
// combine the two lists and return
String recognized[] = new String[superLength + thisLength];
System.arraycopy(superRecognized, 0, recognized, 0, superLength);
System.arraycopy(thisRecognized, 0, recognized, superLength, thisLength);
return recognized;
}
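    // Sketch: the combined lists returned by getFeaturesRecognized() and
    // getPropertiesRecognized() can be used to probe support before configuring
    // the parser, e.g.:
    //
    //     String features[] = parser.getFeaturesRecognized();
    //     for (int i = 0; i < features.length; i++) {
    //         System.out.println("recognized feature: " + features[i]);
    //     }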
// resetting
/** Resets the parser. */
public void reset() throws Exception {
super.reset();
init();
}
/** Resets or copies the parser. */
public void resetOrCopy() throws Exception {
super.resetOrCopy();
init();
}
//
// Protected methods
//
// initialization
/**
* Initializes the parser to a pre-parse state. This method is
* called between calls to <code>parse()</code>.
*/
protected void init() {
// init common
fDocument = null;
// init deferred expansion
fDeferredDocumentImpl = null;
fDocumentIndex = -1;
fDocumentTypeIndex = -1;
fCurrentNodeIndex = -1;
// init full expansion
fDocumentImpl = null;
fDocumentType = null;
fCurrentElementNode = null;
// state
fWithinElement = false;
fInCDATA = false;
// built-in entities
fAmpIndex = fStringPool.addSymbol("amp");
fLtIndex = fStringPool.addSymbol("lt");
fGtIndex = fStringPool.addSymbol("gt");
fAposIndex = fStringPool.addSymbol("apos");
fQuotIndex = fStringPool.addSymbol("quot");
setSendCharDataAsCharArray(false);
fSeenRootElement = false;
fAttrList = new XMLAttrList(fStringPool);
} // init()
// features
/**
     * This method sets whether the expansion of the nodes in the default
     * DOM implementation is deferred.
*
* @see #getDeferNodeExpansion
* @see #setDocumentClassName
*/
protected void setDeferNodeExpansion(boolean deferNodeExpansion)
throws SAXNotRecognizedException, SAXNotSupportedException {
fDeferNodeExpansion = deferNodeExpansion;
}
/**
     * Returns true if the expansion of the nodes in the default DOM
     * implementation is deferred.
*
* @see #setDeferNodeExpansion
*/
protected boolean getDeferNodeExpansion()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fDeferNodeExpansion;
}
/**
* This feature determines whether entity references within
* the document are included in the document tree as
* EntityReference nodes.
* <p>
* Note: The children of the entity reference are always
* added to the document. This feature only affects
* whether an EntityReference node is also included
* as the parent of the entity reference children.
*
* @param create True to create entity reference nodes; false
* to only insert the entity reference children.
*
* @see #getCreateEntityReferenceNodes
*/
protected void setCreateEntityReferenceNodes(boolean create)
throws SAXNotRecognizedException, SAXNotSupportedException {
fCreateEntityReferenceNodes = create;
}
/**
* Returns true if entity references within the document are
* included in the document tree as EntityReference nodes.
*
* @see #setCreateEntityReferenceNodes
*/
public boolean getCreateEntityReferenceNodes()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fCreateEntityReferenceNodes;
}
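    //
    // Illustrative sketch (not in the original source): callers normally toggle this
    // switch through setFeature using the URI documented in setFeature() below.
    //
    //   parser.setFeature(
    //       "http://apache.org/xml/features/dom/create-entity-ref-nodes", false);
    //   // entity children are still inserted, just without EntityReference parents
    //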
/**
* This feature determines whether text nodes that can be
* considered "ignorable whitespace" are included in the DOM
* tree.
* <p>
* Note: The only way that the parser can determine if text
* is ignorable is by reading the associated grammar
* and having a content model for the document. When
* ignorable whitespace text nodes *are* included in
* the DOM tree, they will be flagged as ignorable.
* The ignorable flag can be queried by calling the
* TextImpl#isIgnorableWhitespace():boolean method.
*
* @param include True to include ignorable whitespace text nodes;
* false to not include ignorable whitespace text
* nodes.
*
* @see #getIncludeIgnorableWhitespace
*/
public void setIncludeIgnorableWhitespace(boolean include)
throws SAXNotRecognizedException, SAXNotSupportedException {
fIncludeIgnorableWhitespace = include;
}
/**
* Returns true if ignorable whitespace text nodes are included
* in the DOM tree.
*
* @see #setIncludeIgnorableWhitespace
*/
public boolean getIncludeIgnorableWhitespace()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fIncludeIgnorableWhitespace;
}
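    //
    // Illustrative sketch (not in the original source): this switch is usually set
    // through setFeature with the URI documented later in setFeature().
    //
    //   parser.setFeature(
    //       "http://apache.org/xml/features/dom/include-ignorable-whitespace", false);
    //   // ignorable element-content whitespace is then dropped from the DOM tree
    //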
// properties
/**
* This method allows the programmer to decide which document
     * factory to use when constructing the DOM tree. However, doing
     * so forfeits the functionality of the default factory. Also,
     * a document class other than the default loses the ability
     * to defer node expansion on the DOM tree produced.
*
* @param documentClassName The fully qualified class name of the
* document factory to use when constructing
* the DOM tree.
*
* @see #getDocumentClassName
* @see #setDeferNodeExpansion
* @see #DEFAULT_DOCUMENT_CLASS_NAME
*/
protected void setDocumentClassName(String documentClassName)
throws SAXNotRecognizedException, SAXNotSupportedException {
// normalize class name
if (documentClassName == null) {
documentClassName = DEFAULT_DOCUMENT_CLASS_NAME;
}
// verify that this class exists and is of the right type
try {
Class _class = Class.forName(documentClassName);
//if (!_class.isAssignableFrom(Document.class)) {
if (!Document.class.isAssignableFrom(_class)) {
throw new IllegalArgumentException("PAR002 Class, \""+documentClassName+"\", is not of type org.w3c.dom.Document."+"\n"+documentClassName);
}
}
catch (ClassNotFoundException e) {
throw new IllegalArgumentException("PAR003 Class, \""+documentClassName+"\", not found."+"\n"+documentClassName);
}
// set document class name
fDocumentClassName = documentClassName;
if (!documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME)) {
setDeferNodeExpansion(false);
}
} // setDocumentClassName(String)
/**
* Returns the fully qualified class name of the document factory
* used when constructing the DOM tree.
*
* @see #setDocumentClassName
*/
protected String getDocumentClassName()
throws SAXNotRecognizedException, SAXNotSupportedException {
return fDocumentClassName;
}
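    //
    // Illustrative sketch (not in the original source): the document factory is
    // normally chosen through setProperty. "my.pkg.MyDocumentImpl" is a hypothetical
    // class name; it must implement org.w3c.dom.Document and have a zero-argument
    // constructor.
    //
    //   parser.setProperty(
    //       "http://apache.org/xml/properties/dom/document-class-name",
    //       "my.pkg.MyDocumentImpl");
    //   // note: a non-default document class also turns off deferred node expansion
    //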
/**
* Returns the current element node.
* <p>
* Note: This method is not supported when the "deferNodeExpansion"
* property is set to true and the document factory is set to
* the default factory.
*/
protected Element getCurrentElementNode()
throws SAXNotRecognizedException, SAXNotSupportedException {
if (fCurrentElementNode != null &&
fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
return (Element)fCurrentElementNode;
}
return null;
} // getCurrentElementNode():Element
//
// Configurable methods
//
/**
* Set the state of any feature in a SAX2 parser. The parser
* might not recognize the feature, and if it does recognize
* it, it might not be able to fulfill the request.
*
* @param featureId The unique identifier (URI) of the feature.
* @param state The requested state of the feature (true or false).
*
* @exception SAXNotRecognizedException If the requested feature is
* not known.
* @exception SAXNotSupportedException If the requested feature is
* known, but the requested state
* is not supported.
*/
public void setFeature(String featureId, boolean state)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// SAX2 core features
//
if (featureId.startsWith(SAX2_FEATURES_PREFIX)) {
//
// No additional SAX properties defined for DOMParser.
// Pass request off to XMLParser for the common cases.
//
}
//
// Xerces features
//
else if (featureId.startsWith(XERCES_FEATURES_PREFIX)) {
String feature = featureId.substring(XERCES_FEATURES_PREFIX.length());
//
// http://apache.org/xml/features/dom/defer-node-expansion
// Allows the document tree returned by getDocument()
// to be constructed lazily. In other words, the DOM
// nodes are constructed as the tree is traversed.
            // This allows the document to be returned sooner, at the
            // expense of holding all of the blocks of character data
            // in memory. Then again, lots of DOM nodes
// use a lot of memory as well.
//
if (feature.equals("dom/defer-node-expansion")) {
if (fParseInProgress) {
throw new SAXNotSupportedException("PAR004 Cannot setFeature("+featureId + "): parse is in progress."+"\n"+featureId);
}
setDeferNodeExpansion(state);
return;
}
//
// http://apache.org/xml/features/dom/create-entity-ref-nodes
// This feature determines whether entity references within
// the document are included in the document tree as
// EntityReference nodes.
// Note: The children of the entity reference are always
// added to the document. This feature only affects
// whether an EntityReference node is also included
// as the parent of the entity reference children.
//
if (feature.equals("dom/create-entity-ref-nodes")) {
setCreateEntityReferenceNodes(state);
return;
}
//
// http://apache.org/xml/features/dom/include-ignorable-whitespace
// This feature determines whether text nodes that can be
// considered "ignorable whitespace" are included in the DOM
// tree.
// Note: The only way that the parser can determine if text
// is ignorable is by reading the associated grammar
// and having a content model for the document. When
// ignorable whitespace text nodes *are* included in
// the DOM tree, they will be flagged as ignorable.
// The ignorable flag can be queried by calling the
// TextImpl#isIgnorableWhitespace():boolean method.
//
if (feature.equals("dom/include-ignorable-whitespace")) {
setIncludeIgnorableWhitespace(state);
return;
}
//
// Experimental features
//
//
// http://apache.org/xml/features/domx/grammar-access
// Allows grammar access in the DOM tree. Currently, this
// means that there is an XML Schema document tree as a
// child of the Doctype node.
//
if (feature.equals("domx/grammar-access")) {
fGrammarAccess = state;
return;
}
//
// Pass request off to XMLParser for the common cases.
//
}
//
// Pass request off to XMLParser for the common cases.
//
super.setFeature(featureId, state);
} // setFeature(String,boolean)
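    //
    // Illustrative sketch (not in the original source), using the feature URIs
    // documented in the comments above; note that dom/defer-node-expansion cannot
    // be changed while a parse is in progress:
    //
    //   parser.setFeature(
    //       "http://apache.org/xml/features/dom/defer-node-expansion", false);
    //   parser.setFeature(
    //       "http://apache.org/xml/features/domx/grammar-access", true);
    //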
/**
* Query the current state of any feature in a SAX2 parser. The
* parser might not recognize the feature.
*
     * @param featureId The unique identifier (URI) of the feature
     *                  being queried.
*
* @return The current state of the feature.
*
* @exception SAXNotRecognizedException If the requested feature is
* not known.
*/
public boolean getFeature(String featureId)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// SAX2 core features
//
if (featureId.startsWith(SAX2_FEATURES_PREFIX)) {
//
// No additional SAX properties defined for DOMParser.
// Pass request off to XMLParser for the common cases.
//
}
//
// Xerces features
//
else if (featureId.startsWith(XERCES_FEATURES_PREFIX)) {
String feature = featureId.substring(XERCES_FEATURES_PREFIX.length());
//
// http://apache.org/xml/features/dom/defer-node-expansion
// Allows the document tree returned by getDocument()
// to be constructed lazily. In other words, the DOM
// nodes are constructed as the tree is traversed.
            // This allows the document to be returned sooner, at the
            // expense of holding all of the blocks of character data
            // in memory. Then again, lots of DOM nodes
// use a lot of memory as well.
//
if (feature.equals("dom/defer-node-expansion")) {
return getDeferNodeExpansion();
}
//
// http://apache.org/xml/features/dom/create-entity-ref-nodes
// This feature determines whether entity references within
// the document are included in the document tree as
// EntityReference nodes.
// Note: The children of the entity reference are always
// added to the document. This feature only affects
// whether an EntityReference node is also included
// as the parent of the entity reference children.
//
else if (feature.equals("dom/create-entity-ref-nodes")) {
return getCreateEntityReferenceNodes();
}
//
// http://apache.org/xml/features/dom/include-ignorable-whitespace
// This feature determines whether text nodes that can be
// considered "ignorable whitespace" are included in the DOM
// tree.
// Note: The only way that the parser can determine if text
// is ignorable is by reading the associated grammar
// and having a content model for the document. When
// ignorable whitespace text nodes *are* included in
// the DOM tree, they will be flagged as ignorable.
// The ignorable flag can be queried by calling the
// TextImpl#isIgnorableWhitespace():boolean method.
//
if (feature.equals("dom/include-ignorable-whitespace")) {
return getIncludeIgnorableWhitespace();
}
//
// Experimental features
//
//
// http://apache.org/xml/features/domx/grammar-access
// Allows grammar access in the DOM tree. Currently, this
// means that there is an XML Schema document tree as a
// child of the Doctype node.
//
if (feature.equals("domx/grammar-access")) {
return fGrammarAccess;
}
//
// Pass request off to XMLParser for the common cases.
//
}
//
// Pass request off to XMLParser for the common cases.
//
return super.getFeature(featureId);
} // getFeature(String):boolean
/**
* Set the value of any property in a SAX2 parser. The parser
* might not recognize the property, and if it does recognize
* it, it might not support the requested value.
*
* @param propertyId The unique identifier (URI) of the property
* being set.
     * @param value      The value to which the property is being set.
*
* @exception SAXNotRecognizedException If the requested property is
* not known.
* @exception SAXNotSupportedException If the requested property is
* known, but the requested
* value is not supported.
*/
public void setProperty(String propertyId, Object value)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// Xerces properties
//
if (propertyId.startsWith(XERCES_PROPERTIES_PREFIX)) {
String property = propertyId.substring(XERCES_PROPERTIES_PREFIX.length());
//
// http://apache.org/xml/properties/dom/current-element-node
// Returns the current element node as the DOM Parser is
// parsing. This property is useful for determining the
// relative location of the document when an error is
            // encountered. Note that this property does *not* work
            // when the http://apache.org/xml/features/dom/defer-node-expansion
            // feature is set to true.
//
if (property.equals("dom/current-element-node")) {
throw new SAXNotSupportedException("PAR005 Property, \""+propertyId+"\" is read-only.\n"+propertyId);
}
//
// http://apache.org/xml/properties/dom/document-class-name
// This property can be used to set/query the name of the
// document factory.
//
else if (property.equals("dom/document-class-name")) {
if (value != null && !(value instanceof String)) {
throw new SAXNotSupportedException("PAR006 Property value must be of type java.lang.String.");
}
setDocumentClassName((String)value);
return;
}
}
//
// Pass request off to XMLParser for the common cases.
//
super.setProperty(propertyId, value);
} // setProperty(String,Object)
/**
* Return the current value of a property in a SAX2 parser.
* The parser might not recognize the property.
*
     * @param propertyId The unique identifier (URI) of the property
     *                   being queried.
*
* @return The current value of the property.
*
* @exception SAXNotRecognizedException If the requested property is
* not known.
*
* @see Configurable#getProperty
*/
public Object getProperty(String propertyId)
throws SAXNotRecognizedException, SAXNotSupportedException {
//
// Xerces properties
//
if (propertyId.startsWith(XERCES_PROPERTIES_PREFIX)) {
String property = propertyId.substring(XERCES_PROPERTIES_PREFIX.length());
//
// http://apache.org/xml/properties/dom/current-element-node
// Returns the current element node as the DOM Parser is
// parsing. This property is useful for determining the
// relative location of the document when an error is
            // encountered. Note that this property does *not* work
            // when the http://apache.org/xml/features/dom/defer-node-expansion
            // feature is set to true.
//
if (property.equals("dom/current-element-node")) {
boolean throwException = false;
try {
throwException = getFeature(XERCES_FEATURES_PREFIX+"dom/defer-node-expansion");
}
catch (SAXNotSupportedException e) {
// ignore
}
catch (SAXNotRecognizedException e) {
// ignore
}
if (throwException) {
throw new SAXNotSupportedException("PAR007 Current element node cannot be queried when node expansion is deferred.");
}
return getCurrentElementNode();
}
//
// http://apache.org/xml/properties/dom/document-class-name
// This property can be used to set/query the name of the
// document factory.
//
else if (property.equals("dom/document-class-name")) {
return getDocumentClassName();
}
}
//
// Pass request off to XMLParser for the common cases.
//
return super.getProperty(propertyId);
} // getProperty(String):Object
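    //
    // Illustrative sketch (not in the original source): the current element node can
    // be queried through the property documented above, provided deferred expansion
    // is off. The error-handler wiring is an assumption of this example.
    //
    //   // e.g. inside org.xml.sax.ErrorHandler.error(SAXParseException):
    //   Element where = (Element) parser.getProperty(
    //       "http://apache.org/xml/properties/dom/current-element-node");
    //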
//
// XMLParser methods
//
/** Start document. */
public void startDocument(int versionIndex, int encodingIndex,
int standAloneIndex) {
// clean up unused strings
if (versionIndex != -1) {
fStringPool.orphanString(versionIndex);
}
if (encodingIndex != -1) {
fStringPool.orphanString(encodingIndex);
}
if (standAloneIndex != -1) {
fStringPool.orphanString(standAloneIndex);
}
// deferred expansion
String documentClassName = null;
try {
documentClassName = getDocumentClassName();
} catch (SAXException e) {
throw new RuntimeException("PAR008 Fatal error getting document factory.");
}
boolean deferNodeExpansion = true;
try {
deferNodeExpansion = getDeferNodeExpansion();
} catch (SAXException e) {
throw new RuntimeException("PAR009 Fatal error reading expansion mode.");
}
if (documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME) && deferNodeExpansion) {
boolean nsEnabled = false;
try { nsEnabled = getNamespaces(); }
catch (SAXException s) {}
fDocument = fDeferredDocumentImpl =
new DeferredDocumentImpl(fStringPool, nsEnabled, fGrammarAccess);
fDocumentIndex = fDeferredDocumentImpl.createDocument();
fCurrentNodeIndex = fDocumentIndex;
}
// full expansion
else {
if (documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME)) {
fDocument = fDocumentImpl = new DocumentImpl(fGrammarAccess);
fDocumentImpl.setErrorChecking(false);
}
else {
try {
Class documentClass = Class.forName(documentClassName);
fDocument = (Document)documentClass.newInstance();
}
catch (Exception e) {
// REVISIT: We've already checked the type of the factory
// in the setDocumentClassName() method. The only
// exception that can occur here is if the class
// doesn't have a zero-arg constructor. -Ac
}
}
fCurrentElementNode = fDocument;
}
} // startDocument()
/** End document. */
public void endDocument() throws Exception {}
/** Report the start of the scope of a namespace declaration. */
public void startNamespaceDeclScope(int prefix, int uri) throws Exception {}
/** Report the end of the scope of a namespace declaration. */
public void endNamespaceDeclScope(int prefix) throws Exception {}
/** Start element. */
public void startElement(int elementTypeIndex,
XMLAttrList xmlAttrList, int attrListIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
int element = fDeferredDocumentImpl.createElement(elementTypeIndex, xmlAttrList, attrListIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, element);
fCurrentNodeIndex = element;
fWithinElement = true;
// identifier registration
int index = xmlAttrList.getFirstAttr(attrListIndex);
while (index != -1) {
if (xmlAttrList.getAttType(index) == fStringPool.addSymbol("ID")) {
int nameIndex = xmlAttrList.getAttValue(index);
fDeferredDocumentImpl.putIdentifier(nameIndex, element);
}
index = xmlAttrList.getNextAttr(index);
}
// copy schema grammar, if needed
if (!fSeenRootElement) {
fSeenRootElement = true;
if (fGrammarAccess && fValidator == fSchemaValidator) {
Document schemaDocument = fSchemaValidator.getSchemaDocument();
if (schemaDocument != null) {
if (fDocumentTypeIndex == -1) {
fDocumentTypeIndex = fDeferredDocumentImpl.createDocumentType(elementTypeIndex, -1, -1);
fDeferredDocumentImpl.appendChild(0, fDocumentTypeIndex);
}
Element schema = schemaDocument.getDocumentElement();
copyInto(schema, fDocumentTypeIndex);
}
}
}
}
// full expansion
else {
boolean nsEnabled = false;
try { nsEnabled = getNamespaces(); }
catch (SAXException s) {}
String elementName = fStringPool.toString(elementTypeIndex);
AttributeList attrList = xmlAttrList.getAttributeList(attrListIndex);
Element e;
if (nsEnabled) {
e = fDocument.createElementNS(
fStringPool.toString(fStringPool.getURIForQName(elementTypeIndex)),
fStringPool.toString(elementTypeIndex)
);
} else {
e = fDocument.createElement(elementName);
}
int attrListLength = attrList.getLength();
for (int i = 0; i < attrListLength; i++) {
if (nsEnabled) {
int attName = xmlAttrList.getAttrName(i);
String attNameStr = fStringPool.toString(attName);
int nsURIIndex = fStringPool.getURIForQName(attName);
String namespaceURI = fStringPool.toString(nsURIIndex);
// DOM Level 2 wants all namespace declaration attributes
                        // to be bound to "http://www.w3.org/2000/xmlns/".
                        // So as long as the XML parser doesn't do it, it needs to
                        // be done here.
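                        // Illustrative mapping (sketch, not in the original source):
                        //   xmlns="urn:a"    -> setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns", "urn:a")
                        //   xmlns:b="urn:b"  -> setAttributeNS("http://www.w3.org/2000/xmlns/", "xmlns:b", "urn:b")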
int prefixIndex = fStringPool.getPrefixForQName(attName);
String prefix = fStringPool.toString(prefixIndex);
if (namespaceURI == null) {
if (prefix != null) {
if (prefix.equals("xmlns")) {
namespaceURI = "http://www.w3.org/2000/xmlns/";
}
} else if (attNameStr.equals("xmlns")) {
namespaceURI = "http://www.w3.org/2000/xmlns/";
}
}
e.setAttributeNS(namespaceURI,
attNameStr,
attrList.getValue(i));
} else {
String attrName = attrList.getName(i);
String attrValue = attrList.getValue(i);
e.setAttribute(attrName, attrValue);
if (fDocumentImpl != null && !xmlAttrList.isSpecified(i)) {
((AttrImpl)e.getAttributeNode(attrName)).setSpecified(false);
}
}
}
fCurrentElementNode.appendChild(e);
fCurrentElementNode = e;
fWithinElement = true;
// identifier registration
if (fDocumentImpl != null) {
int index = xmlAttrList.getFirstAttr(attrListIndex);
while (index != -1) {
if (xmlAttrList.getAttType(index) == fStringPool.addSymbol("ID")) {
String name = fStringPool.toString(xmlAttrList.getAttValue(index));
fDocumentImpl.putIdentifier(name, e);
}
index = xmlAttrList.getNextAttr(index);
}
}
xmlAttrList.releaseAttrList(attrListIndex);
// copy schema grammar, if needed
if (!fSeenRootElement) {
fSeenRootElement = true;
if (fDocumentImpl != null && fGrammarAccess && fValidator == fSchemaValidator) {
Document schemaDocument = fSchemaValidator.getSchemaDocument();
if (schemaDocument != null) {
if (fDocumentType == null) {
String rootName = elementName;
String systemId = ""; // REVISIT: How do we get this value? -Ac
String publicId = ""; // REVISIT: How do we get this value? -Ac
String internalSubset = ""; // This value is set later. -rip
fDocumentType = fDocumentImpl.createDocumentType(rootName, publicId,
systemId, internalSubset);
fDocument.appendChild(fDocumentType);
}
Element schema = schemaDocument.getDocumentElement();
XUtil.copyInto(schema, fDocumentType);
}
}
}
}
} // startElement(int,XMLAttrList,int)
/** End element. */
public void endElement(int elementTypeIndex)
throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
fCurrentNodeIndex = fDeferredDocumentImpl.getParentNode(fCurrentNodeIndex, false);
fWithinElement = false;
}
// full node expansion
else {
fCurrentElementNode = fCurrentElementNode.getParentNode();
fWithinElement = false;
}
} // endElement(int)
/** Characters. */
public void characters(int dataIndex)
throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int text;
if (fInCDATA) {
text = fDeferredDocumentImpl.createCDATASection(dataIndex, false);
} else {
// The Text normalization is taken care of within the Text Node
// in the DEFERRED case.
text = fDeferredDocumentImpl.createTextNode(dataIndex, false);
}
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, text);
}
// full node expansion
else {
Text text;
if (fInCDATA) {
text = fDocument.createCDATASection(fStringPool.orphanString(dataIndex));
}
else {
if (fWithinElement && fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
Node lastChild = fCurrentElementNode.getLastChild();
if (lastChild != null
&& lastChild.getNodeType() == Node.TEXT_NODE) {
// Normalization of Text Nodes - append rather than create.
((Text)lastChild).appendData(fStringPool.orphanString(dataIndex));
return;
}
}
text = fDocument.createTextNode(fStringPool.orphanString(dataIndex));
}
fCurrentElementNode.appendChild(text);
}
} // characters(int)
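    //
    // Illustrative effect of the append-based normalization above (sketch, not in the
    // original source): in the full-expansion case, consecutive characters() callbacks
    // for the same element are merged into its trailing Text node, e.g.
    //
    //   characters("foo"); characters("bar");   =>   one Text child "foobar", not two
    //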
/** Ignorable whitespace. */
public void ignorableWhitespace(int dataIndex) throws Exception {
// ignore the whitespace
if (!fIncludeIgnorableWhitespace) {
fStringPool.orphanString(dataIndex);
return;
}
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int text;
if (fInCDATA) {
text = fDeferredDocumentImpl.createCDATASection(dataIndex, true);
} else {
// The Text normalization is taken care of within the Text Node
// in the DEFERRED case.
text = fDeferredDocumentImpl.createTextNode(dataIndex, true);
}
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, text);
}
// full node expansion
else {
Text text;
if (fInCDATA) {
text = fDocument.createCDATASection(fStringPool.orphanString(dataIndex));
}
else {
if (fWithinElement && fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
Node lastChild = fCurrentElementNode.getLastChild();
if (lastChild != null
&& lastChild.getNodeType() == Node.TEXT_NODE) {
// Normalization of Text Nodes - append rather than create.
((Text)lastChild).appendData(fStringPool.orphanString(dataIndex));
return;
}
}
text = fDocument.createTextNode(fStringPool.orphanString(dataIndex));
}
if (fDocumentImpl != null) {
((TextImpl)text).setIgnorableWhitespace(true);
}
fCurrentElementNode.appendChild(text);
}
} // ignorableWhitespace(int)
/** Processing instruction. */
public void processingInstruction(int targetIndex, int dataIndex)
throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int pi = fDeferredDocumentImpl.createProcessingInstruction(targetIndex, dataIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, pi);
}
// full node expansion
else {
String target = fStringPool.orphanString(targetIndex);
String data = fStringPool.orphanString(dataIndex);
ProcessingInstruction pi = fDocument.createProcessingInstruction(target, data);
fCurrentElementNode.appendChild(pi);
}
} // processingInstruction(int,int)
/** Comment. */
public void comment(int dataIndex) throws Exception {
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int comment = fDeferredDocumentImpl.createComment(dataIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, comment);
}
// full node expansion
else {
Comment comment = fDocument.createComment(fStringPool.orphanString(dataIndex));
fCurrentElementNode.appendChild(comment);
}
} // comment(int)
/** Not called. */
public void characters(char ch[], int start, int length) throws Exception {}
/** Not called. */
public void ignorableWhitespace(char ch[], int start, int length) throws Exception {}
//
// XMLDocumentScanner methods
//
/** Start CDATA section. */
public void startCDATA() throws Exception {
fInCDATA = true;
}
/** End CDATA section. */
public void endCDATA() throws Exception {
fInCDATA = false;
}
//
// XMLEntityHandler methods
//
/** Start entity reference. */
public void startEntityReference(int entityName, int entityType,
int entityContext) throws Exception {
// are we ignoring entity reference nodes?
if (!fCreateEntityReferenceNodes) {
return;
}
// ignore built-in entities
if (entityName == fAmpIndex ||
entityName == fGtIndex ||
entityName == fLtIndex ||
entityName == fAposIndex ||
entityName == fQuotIndex) {
return;
}
// we only support one context for entity references right now...
if (entityContext != XMLEntityHandler.CONTEXT_IN_CONTENT) {
return;
}
// deferred node expansion
if (fDeferredDocumentImpl != null) {
int entityRefIndex = fDeferredDocumentImpl.createEntityReference(entityName);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, entityRefIndex);
fCurrentNodeIndex = entityRefIndex;
}
// full node expansion
else {
EntityReference er = fDocument.createEntityReference(fStringPool.toString(entityName));
fCurrentElementNode.appendChild(er);
fCurrentElementNode = er;
}
} // startEntityReference(int,int,int)
/** End entity reference. */
public void endEntityReference(int entityName, int entityType,
int entityContext) throws Exception {
// are we ignoring entity reference nodes?
if (!fCreateEntityReferenceNodes) {
return;
}
// ignore built-in entities
if (entityName == fAmpIndex ||
entityName == fGtIndex ||
entityName == fLtIndex ||
entityName == fAposIndex ||
entityName == fQuotIndex) {
return;
}
// we only support one context for entity references right now...
if (entityContext != XMLEntityHandler.CONTEXT_IN_CONTENT) {
return;
}
// deferred node expansion
if (fDeferredDocumentImpl != null) {
String name = fStringPool.toString(entityName);
int erChild = fCurrentNodeIndex;
fCurrentNodeIndex = fDeferredDocumentImpl.getParentNode(erChild, false);
            // sanity check: this condition should never be true, so the early return should never be taken.
if (fDeferredDocumentImpl.getNodeType(erChild, false) != Node.ENTITY_REFERENCE_NODE) return;
erChild = fDeferredDocumentImpl.getFirstChild(erChild, false); // first Child of EntityReference
if (fDocumentTypeIndex != -1) {
// find Entity decl for this EntityReference.
int entityDecl = fDeferredDocumentImpl.getFirstChild(fDocumentTypeIndex, false);
while (entityDecl != -1) {
if (fDeferredDocumentImpl.getNodeType(entityDecl, false) == Node.ENTITY_NODE
&& fDeferredDocumentImpl.getNodeNameString(entityDecl, false).equals(name)) // string compare...
{
break;
}
entityDecl = fDeferredDocumentImpl.getNextSibling(entityDecl, false);
}
if (entityDecl != -1
&& fDeferredDocumentImpl.getFirstChild(entityDecl, false) == -1) {
// found entityDecl with same name as this reference
// AND it doesn't have any children.
// we don't need to iterate, because the whole structure
// should already be connected to the 1st child.
fDeferredDocumentImpl.setAsFirstChild(entityDecl, erChild);
}
}
}
// full node expansion
else {
Node erNode = fCurrentElementNode;//fCurrentElementNode.getParentNode();
fCurrentElementNode = erNode.getParentNode();
if (fDocumentImpl != null) {
NamedNodeMap entities = fDocumentType.getEntities();
String name = fStringPool.toString(entityName);
Node entityNode = entities.getNamedItem(name);
// We could simply return here if there is no entity for the reference.
if (entityNode == null || entityNode.hasChildNodes()) {
return;
}
Entity entity = (Entity)entityNode;
if (erNode.hasChildNodes()) {
NodeList list = erNode.getChildNodes();
int len = list.getLength();
for (int i = 0; i < len; i++) {
Node childClone = list.item(i).cloneNode(true);
entity.appendChild(childClone);
}
}
}
}
} // endEntityReference(int,int,int)
//
// DTDValidator.EventHandler methods
//
/**
* This function will be called when a <!DOCTYPE...> declaration is
* encountered.
*/
public void startDTD(int rootElementType, int publicId, int systemId)
throws Exception {
// full expansion
if (fDocumentImpl != null) {
String rootElementName = fStringPool.toString(rootElementType);
String publicString = fStringPool.toString(publicId);
String systemString = fStringPool.toString(systemId);
            String internalSubset = ""; // REVISIT: this value is set later. -rip
fDocumentType = fDocumentImpl.
createDocumentType(rootElementName, publicString, systemString, internalSubset);
fDocumentImpl.appendChild(fDocumentType);
if (fGrammarAccess) {
Element schema = fDocument.createElement("schema");
// REVISIT: What should the namespace be? -Ac
schema.setAttribute("xmlns", "http://www.w3.org/1999/XMLSchema");
((AttrImpl)schema.getAttributeNode("xmlns")).setSpecified(false);
schema.setAttribute("finalDefault", "");
((AttrImpl)schema.getAttributeNode("finalDefault")).setSpecified(false);
schema.setAttribute("exactDefault", "");
((AttrImpl)schema.getAttributeNode("exactDefault")).setSpecified(false);
fDocumentType.appendChild(schema);
}
}
// deferred expansion
else if (fDeferredDocumentImpl != null) {
fDocumentTypeIndex =
fDeferredDocumentImpl.
createDocumentType(rootElementType, publicId, systemId);
fDeferredDocumentImpl.appendChild(fDocumentIndex, fDocumentTypeIndex);
if (fGrammarAccess) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("xmlns"),
fStringPool.addString("http://www.w3.org/1999/XMLSchema"),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("finalDefault"),
fStringPool.addString(""),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("exactDefault"),
fStringPool.addString(""),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.endAttrList();
int schemaIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("schema"), fAttrList, handle);
// REVISIT: What should the namespace be? -Ac
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, schemaIndex);
}
}
} // startDTD(int,int,int)
/**
* Supports DOM Level 2 internalSubset additions.
* Called when the internal subset is completely scanned.
*/
public void internalSubset(int internalSubset) {
//System.out.println("internalSubset callback:"+fStringPool.toString(internalSubset));
// full expansion
if (fDocumentImpl != null && fDocumentType != null) {
((DocumentTypeImpl)fDocumentType).setInternalSubset(fStringPool.toString(internalSubset));
}
// deferred expansion
else if (fDeferredDocumentImpl != null) {
fDeferredDocumentImpl.setInternalSubset(fDocumentTypeIndex, internalSubset);
}
}
/**
* This function will be called at the end of the DTD.
*/
public void endDTD() throws Exception {}
/**
* <!ELEMENT Name contentspec>
*/
public void elementDecl(int elementTypeIndex,
XMLValidator.ContentSpec contentSpec)
throws Exception {
if (DEBUG_ATTLIST_DECL) {
String contentModel = contentSpec.toString();
System.out.println("elementDecl(" + fStringPool.toString(elementTypeIndex) + ", " +
contentModel + ")");
}
//
// Create element declaration
//
if (fGrammarAccess) {
if (fDeferredDocumentImpl != null) {
//
// Build element
//
// get element declaration; create if necessary
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
int elementIndex = getFirstChildElement(schemaIndex, "element", "name", elementName);
if (elementIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(elementName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("minOccurs"), // name
fStringPool.addString("1"), // value
fStringPool.addSymbol("NMTOKEN"), // type
false, // specified
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("nullable"), // name
fStringPool.addString("false"), // value
fStringPool.addSymbol("ENUMERATION"), // type
false, // specified
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("abstract"), // name
fStringPool.addString("false"), // value
fStringPool.addSymbol("ENUMERATION"), // type
false, // specified
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("final"), // name
fStringPool.addString("false"), // value
fStringPool.addSymbol("ENUMERATION"), // type
false, // specified
false); // search
fAttrList.endAttrList();
elementIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, elementIndex);
}
//
// Build content model
//
// <!ELEMENT name (#PCDATA)>
int contentType = contentSpec.getType();
String contentTypeName = fStringPool.toString(contentType);
XMLContentSpecNode node = new XMLContentSpecNode();
int contentSpecHandle = contentSpec.getHandle();
if (contentSpecHandle != -1) {
contentSpec.getNode(contentSpecHandle, node);
}
// (#PCDATA)
if (contentTypeName.equals("MIXED") && node.type == 0) {
int attrNameIndex = fStringPool.addSymbol("type");
int attrValueIndex = fStringPool.addString("string");
boolean attrSpecified = true;
int attrIndex = fDeferredDocumentImpl.createAttribute(attrNameIndex, attrValueIndex, attrSpecified);
fDeferredDocumentImpl.setAttributeNode(elementIndex, attrIndex);
}
// other content models
else {
// get type element; create if necessary
int typeIndex = getFirstChildElement(elementIndex, "type");
if (typeIndex == -1) {
typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("type"), null, -1);
// REVISIT: Check for type redeclaration? -Ac
fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getFirstChildElement(elementIndex));
}
// <!ELEMENT name EMPTY>
if (contentTypeName.equals("EMPTY")) {
int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("empty"), true);
fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
}
// <!ELEMENT name ANY>
else if (contentTypeName.equals("ANY")) {
int anyIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("any"), null, -1);
fDeferredDocumentImpl.insertBefore(typeIndex, anyIndex, getFirstChildElement(typeIndex));
}
// <!ELEMENT name (a,b,...)> or <!ELEMENT name (a|b|...)>
else if (contentTypeName.equals("CHILDREN")) {
int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("elementOnly"), true);
fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
//attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("order"), fStringPool.addString("seq"), false);
//fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
int contentSpecIndex = contentSpec.getHandle();
contentSpec.getNode(contentSpecIndex, node);
Element model = createContentModel(contentSpec, node);
int modelIndex = createDeferredContentModel(model);
int firstChildIndex = getFirstChildElement(typeIndex);
fDeferredDocumentImpl.insertBefore(typeIndex, modelIndex, firstChildIndex);
}
// <!ELEMENT name (#PCDATA|a|...)*>
else {
// REVISIT: Any chance of getting other than MIXED? -Ac
// set content type
int attrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("mixed"), true);
fDeferredDocumentImpl.setAttributeNode(typeIndex, attrIndex);
// skip '*' node
contentSpec.getNode(node.value, node);
// add leaves (on descent)
do {
int index = node.value;
int handle = fAttrList.startAttrList();
contentSpec.getNode(node.otherValue, node);
String elementRefName = fStringPool.toString(node.value);
fAttrList.addAttr(
fStringPool.addSymbol("ref"),
fStringPool.addString(elementRefName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.endAttrList();
int elementRefIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
fDeferredDocumentImpl.insertBefore(typeIndex, elementRefIndex, getFirstChildElement(typeIndex, "element"));
contentSpec.getNode(index, node);
} while (node.type != XMLContentSpecNode.CONTENTSPECNODE_LEAF);
}
}
} // if defer-node-expansion
else if (fDocumentImpl != null) {
//
// Build element
//
// get element declaration; create if necessary
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
Element element = XUtil.getFirstChildElement(schema, "element", "name", elementName);
if (element == null) {
element = fDocument.createElement("element");
element.setAttribute("name", elementName);
element.setAttribute("minOccurs", "1");
((AttrImpl)element.getAttributeNode("minOccurs")).setSpecified(false);
element.setAttribute("nullable", "false");
((AttrImpl)element.getAttributeNode("nullable")).setSpecified(false);
element.setAttribute("abstract", "false");
((AttrImpl)element.getAttributeNode("abstract")).setSpecified(false);
element.setAttribute("final", "false");
((AttrImpl)element.getAttributeNode("final")).setSpecified(false);
schema.appendChild(element);
}
//
// Build content model
//
// <!ELEMENT name (#PCDATA)>
int contentType = contentSpec.getType();
String contentTypeName = fStringPool.toString(contentType);
XMLContentSpecNode node = new XMLContentSpecNode();
int contentSpecHandle = contentSpec.getHandle();
if (contentSpecHandle != -1) {
contentSpec.getNode(contentSpecHandle, node);
}
// (#PCDATA)
if (contentTypeName.equals("MIXED") && node.type == 0) {
element.setAttribute("type", "string");
}
// other content models
else {
// get type element; create if necessary
Element type = XUtil.getFirstChildElement(element, "type");
if (type == null) {
type = fDocumentImpl.createElement("type");
// REVISIT: Check for type redeclaration? -Ac
element.insertBefore(type, XUtil.getFirstChildElement(element));
}
// <!ELEMENT name EMPTY>
if (contentTypeName.equals("EMPTY")) {
type.setAttribute("content", "empty");
}
// <!ELEMENT name ANY>
else if (contentTypeName.equals("ANY")) {
Element any = fDocumentImpl.createElement("any");
type.insertBefore(any, XUtil.getFirstChildElement(type));
}
// <!ELEMENT name (a,b,...)> or <!ELEMENT name (a|b|...)>
else if (contentTypeName.equals("CHILDREN")) {
type.setAttribute("content", "elementOnly");
//((AttrImpl)type.getAttributeNode("content")).setSpecified(false);
//type.setAttribute("order", "seq");
//((AttrImpl)type.getAttributeNode("order")).setSpecified(false);
int contentSpecIndex = contentSpec.getHandle();
contentSpec.getNode(contentSpecIndex, node);
Element model = createContentModel(contentSpec, node);
Node fragment = fDocument.createDocumentFragment();
XUtil.copyInto(model, fragment);
Element firstChild = XUtil.getFirstChildElement(type);
type.insertBefore(fragment, firstChild);
}
// <!ELEMENT name (#PCDATA|a|...)*>
else {
// REVISIT: Any chance of getting other than MIXED? -Ac
// set content type
type.setAttribute("content", "mixed");
// skip '*' node
contentSpec.getNode(node.value, node);
// add leaves (on descent)
do {
int index = node.value;
int handle = fAttrList.startAttrList();
contentSpec.getNode(node.otherValue, node);
String elementRefName = fStringPool.toString(node.value);
Element elementRef = fDocumentImpl.createElement("element");
elementRef.setAttribute("ref", elementRefName);
type.insertBefore(elementRef, XUtil.getFirstChildElement(type, "element"));
contentSpec.getNode(index, node);
} while (node.type != XMLContentSpecNode.CONTENTSPECNODE_LEAF);
}
}
} // if NOT defer-node-expansion
} // if grammar-access
} // elementDecl(int,String)
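    //
    // Illustrative result of the grammar-access mapping above (sketch, not in the
    // original source; the defaulted attributes and the exact content-model children
    // are omitted): a declaration such as
    //
    //   <!ELEMENT chapter (title,para)>
    //
    // is reflected under the Doctype node roughly as
    //
    //   <element name="chapter">
    //     <type content="elementOnly"> ...element references for title and para... </type>
    //   </element>
    //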
/**
* <!ATTLIST Name AttDef>
*/
public void attlistDecl(int elementTypeIndex,
int attrNameIndex, int attType,
String enumString,
int attDefaultType, int attDefaultValue)
throws Exception {
if (DEBUG_ATTLIST_DECL) {
System.out.println("attlistDecl(" + fStringPool.toString(elementTypeIndex) + ", " +
fStringPool.toString(attrNameIndex) + ", " +
fStringPool.toString(attType) + ", " +
enumString + ", " +
fStringPool.toString(attDefaultType) + ", " +
fStringPool.toString(attDefaultValue) + ")");
}
// deferred expansion
if (fDeferredDocumentImpl != null) {
// get the default value
if (attDefaultValue != -1) {
if (DEBUG_ATTLIST_DECL) {
System.out.println(" adding default attribute value: "+
fStringPool.toString(attDefaultValue));
}
// get element definition
int elementDefIndex = fDeferredDocumentImpl.lookupElementDefinition(elementTypeIndex);
// create element definition if not already there
if (elementDefIndex == -1) {
elementDefIndex = fDeferredDocumentImpl.createElementDefinition(elementTypeIndex);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, elementDefIndex);
}
// add default attribute
int attrIndex = fDeferredDocumentImpl.createAttribute(attrNameIndex, attDefaultValue, false);
fDeferredDocumentImpl.appendChild(elementDefIndex, attrIndex);
}
//
// Create attribute declaration
//
if (fGrammarAccess) {
// get element declaration; create it if necessary
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
int elementIndex = getFirstChildElement(schemaIndex, "element", "name", elementName);
if (elementIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(elementName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); //search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
fAttrList.endAttrList();
elementIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, elementIndex);
}
// get type element; create it if necessary
int typeIndex = getFirstChildElement(elementIndex, "type");
if (typeIndex == -1) {
typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("type"), null, -1);
fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getFirstChildElement(elementIndex));
}
// create attribute and set its attributes
String attributeName = fStringPool.toString(attrNameIndex);
int attributeIndex = getFirstChildElement(elementIndex, "attribute", "name", attributeName);
if (attributeIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(attributeName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
/***
fAttrList.addAttr(
fStringPool.addSymbol("type"),
fStringPool.addString("string"),
fStringPool.addSymbol("CDATA"),
true,
false); // search
/***/
fAttrList.addAttr(
fStringPool.addSymbol("minOccurs"),
fStringPool.addString("0"),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("maxOccurs"),
fStringPool.addString("1"),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.endAttrList();
attributeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("attribute"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(typeIndex, attributeIndex);
// attribute type: CDATA, ENTITY, ... , NMTOKENS; ENUMERATION
String attributeTypeName = fStringPool.toString(attType);
if (attributeTypeName.equals("CDATA")) {
int typeAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("type"), fStringPool.addString("string"), false);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, typeAttrIndex);
}
else if (attributeTypeName.equals("ENUMERATION")) {
handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("source"),
fStringPool.addString("NMTOKEN"),
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
int datatypeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("datatype"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(attributeIndex, datatypeIndex);
String tokenizerString = enumString.substring(1, enumString.length() - 1);
StringTokenizer tokenizer = new StringTokenizer(tokenizerString, "|");
while (tokenizer.hasMoreTokens()) {
handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("value"),
fStringPool.addString(tokenizer.nextToken()),
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
int enumerationIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("enumeration"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(datatypeIndex, enumerationIndex);
}
}
else {
// REVISIT: Could we ever get an unknown data type? -Ac
int typeAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("type"), fStringPool.addString(attributeTypeName), true);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, typeAttrIndex);
}
// attribute default type: #IMPLIED, #REQUIRED, #FIXED
boolean fixed = false;
if (attDefaultType != -1) {
String attributeDefaultTypeName = fStringPool.toString(attDefaultType);
if (attributeDefaultTypeName.equals("#REQUIRED")) {
int minOccursAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("minOccurs"), fStringPool.addString("1"), true);
int oldMinOccursAttrIndex = fDeferredDocumentImpl.setAttributeNode(attributeIndex, minOccursAttrIndex);
fStringPool.releaseString(fDeferredDocumentImpl.getNodeValue(oldMinOccursAttrIndex, false));
}
else if (attributeDefaultTypeName.equals("#FIXED")) {
fixed = true;
int fixedAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("fixed"), attDefaultValue, true);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, fixedAttrIndex);
}
}
// attribute default value
if (!fixed && attDefaultValue != -1) {
int defaultAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("default"), attDefaultValue, true);
fDeferredDocumentImpl.setAttributeNode(attributeIndex, defaultAttrIndex);
}
}
}
}
// full expansion
else if (fDocumentImpl != null) {
// get the default value
if (attDefaultValue != -1) {
if (DEBUG_ATTLIST_DECL) {
System.out.println(" adding default attribute value: "+
fStringPool.toString(attDefaultValue));
}
// get element name
String elementName = fStringPool.toString(elementTypeIndex);
// get element definition node
NamedNodeMap elements = ((DocumentTypeImpl)fDocumentType).getElements();
ElementDefinitionImpl elementDef = (ElementDefinitionImpl)elements.getNamedItem(elementName);
if (elementDef == null) {
elementDef = fDocumentImpl.createElementDefinition(elementName);
((DocumentTypeImpl)fDocumentType).getElements().setNamedItem(elementDef);
}
// REVISIT: Check for uniqueness of element name? -Ac
// get attribute name and value index
String attrName = fStringPool.toString(attrNameIndex);
String attrValue = fStringPool.toString(attDefaultValue);
// create attribute and set properties
AttrImpl attr = (AttrImpl)fDocumentImpl.createAttribute(attrName);
attr.setValue(attrValue);
attr.setSpecified(false);
// add default attribute to element definition
elementDef.getAttributes().setNamedItem(attr);
}
//
// Create attribute declaration
//
if (fGrammarAccess) {
// get element declaration; create it if necessary
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
String elementName = fStringPool.toString(elementTypeIndex);
Element element = XUtil.getFirstChildElement(schema, "element", "name", elementName);
if (element == null) {
element = fDocument.createElement("element");
element.setAttribute("name", elementName);
//element.setAttribute("export", "true");
//((AttrImpl)element.getAttributeNode("export")).setSpecified(false);
schema.appendChild(element);
}
// get type element; create it if necessary
Element type = XUtil.getFirstChildElement(element, "type");
if (type == null) {
type = fDocument.createElement("type");
// REVISIT: Check for archetype redeclaration? -Ac
element.insertBefore(type, XUtil.getFirstChildElement(element));
}
// create attribute and set its attributes
String attributeName = fStringPool.toString(attrNameIndex);
Element attribute = XUtil.getFirstChildElement(element, "attribute", "name", attributeName);
if (attribute == null) {
attribute = fDocument.createElement("attribute");
attribute.setAttribute("name", attributeName);
attribute.setAttribute("minOccurs", "0");
((AttrImpl)attribute.getAttributeNode("minOccurs")).setSpecified(false);
attribute.setAttribute("maxOccurs", "1");
((AttrImpl)attribute.getAttributeNode("maxOccurs")).setSpecified(false);
type.appendChild(attribute);
// attribute type: CDATA, ENTITY, ... , NMTOKENS; ENUMERATION
String attributeTypeName = fStringPool.toString(attType);
if (attributeTypeName.equals("CDATA")) {
attribute.setAttribute("type", "string");
((AttrImpl)attribute.getAttributeNode("type")).setSpecified(false);
}
else if (attributeTypeName.equals("ENUMERATION")) {
Element datatype = fDocumentImpl.createElement("datatype");
datatype.setAttribute("source", "NMTOKEN");
attribute.appendChild(datatype);
String tokenizerString = enumString.substring(1, enumString.length() - 1);
StringTokenizer tokenizer = new StringTokenizer(tokenizerString, "|");
while (tokenizer.hasMoreTokens()) {
Element enumeration = fDocument.createElement("enumeration");
enumeration.setAttribute("value", tokenizer.nextToken());
datatype.appendChild(enumeration);
}
}
else {
// REVISIT: Could we ever get an unknown data type? -Ac
attribute.setAttribute("type", attributeTypeName);
}
// attribute default type: #IMPLIED, #REQUIRED, #FIXED
boolean fixed = false;
if (attDefaultType != -1) {
String attributeDefaultTypeName = fStringPool.toString(attDefaultType);
if (attributeDefaultTypeName.equals("#REQUIRED")) {
attribute.setAttribute("minOccurs", "1");
((AttrImpl)attribute.getAttributeNode("minOccurs")).setSpecified(true);
}
else if (attributeDefaultTypeName.equals("#FIXED")) {
fixed = true;
String fixedValue = fStringPool.toString(attDefaultValue);
attribute.setAttribute("fixed", fixedValue);
}
}
// attribute default value
if (!fixed && attDefaultValue != -1) {
String defaultValue = fStringPool.toString(attDefaultValue);
attribute.setAttribute("default", defaultValue);
}
}
}
} // if NOT defer-node-expansion
} // attlistDecl(int,int,int,String,int,int)
/**
* <!ENTITY % Name EntityValue> (internal)
*/
public void internalPEDecl(int entityName, int entityValue) throws Exception {}
/**
* <!ENTITY % Name ExternalID> (external)
*/
public void externalPEDecl(int entityName, int publicId, int systemId) throws Exception {}
/**
* <!ENTITY Name EntityValue> (internal)
*/
public void internalEntityDecl(int entityNameIndex, int entityValueIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
if (fDocumentTypeIndex == -1) return; //revisit: should never happen. Exception?
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, -1, -1, -1);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create internal entity declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String entityName = fStringPool.toString(entityNameIndex);
int textEntityIndex = getFirstChildElement(schemaIndex, "textEntity", "name", entityName);
if (textEntityIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(entityName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
fAttrList.endAttrList();
textEntityIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("textEntity"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, textEntityIndex);
int textIndex = fDeferredDocumentImpl.createTextNode(entityValueIndex, false);
fDeferredDocumentImpl.appendChild(textEntityIndex, textIndex);
}
}
/***/
}
// full expansion
else if (fDocumentImpl != null) {
if (fDocumentType == null) return; //revisit: should never happen. Exception?
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String entityName = fStringPool.toString(entityNameIndex);
Entity entity = fDocumentImpl.createEntity(entityName);
fDocumentType.getEntities().setNamedItem(entity);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create internal entity declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element textEntity = XUtil.getFirstChildElement(schema, "textEntity", "name", entityName);
if (textEntity == null) {
textEntity = fDocument.createElement("textEntity");
textEntity.setAttribute("name", entityName);
textEntity.setAttribute("export", "true");
((AttrImpl)textEntity.getAttributeNode("export")).setSpecified(false);
String entityValue = fStringPool.toString(entityValueIndex);
Text value = fDocument.createTextNode(entityValue);
textEntity.appendChild(value);
schema.appendChild(textEntity);
}
}
/***/
}
} // internalEntityDecl(int,int)
/**
* <!ENTITY Name ExternalID> (external)
*/
public void externalEntityDecl(int entityNameIndex, int publicIdIndex, int systemIdIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, publicIdIndex, systemIdIndex, -1);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create external entity declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String entityName = fStringPool.toString(entityNameIndex);
int externalEntityIndex = getFirstChildElement(schemaIndex, "externalEntity", "name", entityName);
if (externalEntityIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(entityName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
if (publicIdIndex != -1) {
fAttrList.addAttr(
fStringPool.addSymbol("public"),
publicIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
}
fAttrList.addAttr(
fStringPool.addSymbol("system"),
systemIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
externalEntityIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("externalEntity"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, externalEntityIndex);
}
}
/***/
}
// full expansion
else if (fDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String entityName = fStringPool.toString(entityNameIndex);
String publicId = fStringPool.toString(publicIdIndex);
String systemId = fStringPool.toString(systemIdIndex);
EntityImpl entity = (EntityImpl)fDocumentImpl.createEntity(entityName);
if (publicIdIndex != -1) {
entity.setPublicId(publicId);
}
entity.setSystemId(systemId);
fDocumentType.getEntities().setNamedItem(entity);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// create external entity declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element externalEntity = XUtil.getFirstChildElement(schema, "externalEntity", "name", entityName);
if (externalEntity == null) {
externalEntity = fDocument.createElement("externalEntity");
externalEntity.setAttribute("name", entityName);
externalEntity.setAttribute("export", "true");
((AttrImpl)externalEntity.getAttributeNode("export")).setSpecified(false);
if (publicIdIndex != -1) {
externalEntity.setAttribute("public", publicId);
}
externalEntity.setAttribute("system", systemId);
schema.appendChild(externalEntity);
}
}
/***/
}
} // externalEntityDecl(int,int,int)
/**
* <!ENTITY Name ExternalID NDataDecl> (unparsed)
*/
public void unparsedEntityDecl(int entityNameIndex,
int publicIdIndex, int systemIdIndex,
int notationNameIndex) throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, publicIdIndex, systemIdIndex, notationNameIndex);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// add unparsed entity declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String entityName = fStringPool.toString(entityNameIndex);
int unparsedEntityIndex = getFirstChildElement(schemaIndex, "unparsedEntity", "name", entityName);
if (unparsedEntityIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(entityName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
if (publicIdIndex != -1) {
fAttrList.addAttr(
fStringPool.addSymbol("public"),
publicIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
}
fAttrList.addAttr(
fStringPool.addSymbol("system"),
systemIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("notation"),
fStringPool.addString(fStringPool.toString(notationNameIndex)),
fStringPool.addSymbol("CDATA"),
true,
false); // search
fAttrList.endAttrList();
unparsedEntityIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("unparsedEntity"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, unparsedEntityIndex);
}
}
/***/
}
// full expansion
else if (fDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String entityName = fStringPool.toString(entityNameIndex);
String publicId = fStringPool.toString(publicIdIndex);
String systemId = fStringPool.toString(systemIdIndex);
String notationName = fStringPool.toString(notationNameIndex);
EntityImpl entity = (EntityImpl)fDocumentImpl.createEntity(entityName);
if (publicIdIndex != -1) {
entity.setPublicId(publicId);
}
entity.setSystemId(systemId);
entity.setNotationName(notationName);
fDocumentType.getEntities().setNamedItem(entity);
/***
// REVISIT: Entities were removed from latest working draft. -Ac
// add unparsed entity declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element unparsedEntity = XUtil.getFirstChildElement(schema, "unparsedEntity", "name", entityName);
if (unparsedEntity == null) {
unparsedEntity = fDocument.createElement("unparsedEntity");
unparsedEntity.setAttribute("name", entityName);
unparsedEntity.setAttribute("export", "true");
((AttrImpl)unparsedEntity.getAttributeNode("export")).setSpecified(false);
if (publicIdIndex != -1) {
unparsedEntity.setAttribute("public", publicId);
}
unparsedEntity.setAttribute("system", systemId);
unparsedEntity.setAttribute("notation", notationName);
schema.appendChild(unparsedEntity);
}
}
/***/
}
} // unparsedEntityDecl(int,int,int,int)
/**
* <!NOTATION Name ExternalId>
*/
public void notationDecl(int notationNameIndex, int publicIdIndex, int systemIdIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
//revisit: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
int newNotationIndex = fDeferredDocumentImpl.createNotation(notationNameIndex, publicIdIndex, systemIdIndex);
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newNotationIndex);
// create notation declaration
if (fGrammarAccess) {
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
String notationName = fStringPool.toString(notationNameIndex);
int notationIndex = getFirstChildElement(schemaIndex, "notation", "name", notationName);
if (notationIndex == -1) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("name"),
fStringPool.addString(notationName),
fStringPool.addSymbol("NMTOKEN"),
true,
false); // search
/***
fAttrList.addAttr(
fStringPool.addSymbol("export"),
fStringPool.addString("true"),
fStringPool.addSymbol("ENUMERATION"),
false,
false); // search
/***/
if (publicIdIndex == -1) {
publicIdIndex = 0; // empty string in string pool
}
fAttrList.addAttr(
fStringPool.addSymbol("public"),
publicIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
if (systemIdIndex != -1) {
fAttrList.addAttr(
fStringPool.addSymbol("system"),
systemIdIndex,
fStringPool.addSymbol("CDATA"),
true,
false); // search
}
fAttrList.endAttrList();
notationIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("notation"), fAttrList, handle);
fDeferredDocumentImpl.appendChild(schemaIndex, notationIndex);
}
}
}
// full expansion
else if (fDocumentImpl != null) {
// REVISIT: how to check if entity was already declared.
// XML spec says that 1st Entity decl is binding.
String notationName = fStringPool.toString(notationNameIndex);
String publicId = fStringPool.toString(publicIdIndex);
String systemId = fStringPool.toString(systemIdIndex);
NotationImpl notationImpl = (NotationImpl)fDocumentImpl.createNotation(notationName);
notationImpl.setPublicId(publicId);
if (systemIdIndex != -1) {
notationImpl.setSystemId(systemId);
}
fDocumentType.getNotations().setNamedItem(notationImpl);
// create notation declaration
if (fGrammarAccess) {
Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
Element notation = XUtil.getFirstChildElement(schema, "notation", "name", notationName);
if (notation == null) {
notation = fDocument.createElement("notation");
notation.setAttribute("name", notationName);
//notation.setAttribute("export", "true");
//((AttrImpl)notation.getAttributeNode("export")).setSpecified(false);
if (publicId == null) {
publicId = "";
}
notation.setAttribute("public", publicId);
if (systemIdIndex != -1) {
notation.setAttribute("system", systemId);
}
schema.appendChild(notation);
}
}
}
} // notationDecl(int,int,int)
//
// Private methods
//
/**
* Creates a content model from the specified content spec node.
* This method will always return a <em>group</em> element as the
* containing element, even when the content model contains a
* single element reference.
*/
private Element createContentModel(XMLValidator.ContentSpec contentSpec,
XMLContentSpecNode node) {
Element model = createContentModel(contentSpec, node,
new DocumentImpl(), null);
return model;
} // createContentModel(XMLContentSpecNode):Element
/**
* This is the real <em>createContentModel</em> method. This is a
* recursive solution.
*/
private Element createContentModel(XMLValidator.ContentSpec contentSpec,
XMLContentSpecNode node,
Document factory,
Element parent) {
// figure out occurrence count
int minOccur = 1;
int maxOccur = 1;
switch (node.type) {
case XMLContentSpecNode.CONTENTSPECNODE_ONE_OR_MORE: {
minOccur = 1;
maxOccur = -1;
contentSpec.getNode(node.value, node);
break;
}
case XMLContentSpecNode.CONTENTSPECNODE_ZERO_OR_MORE: {
minOccur = 0;
maxOccur = -1;
contentSpec.getNode(node.value, node);
break;
}
case XMLContentSpecNode.CONTENTSPECNODE_ZERO_OR_ONE: {
minOccur = 0;
maxOccur = 1;
contentSpec.getNode(node.value, node);
break;
}
}
// flatten model
int nodeType = node.type;
switch (nodeType) {
// CHOICE or SEQUENCE
case XMLContentSpecNode.CONTENTSPECNODE_CHOICE:
case XMLContentSpecNode.CONTENTSPECNODE_SEQ: {
// go down left side
int leftIndex = node.value;
int rightIndex = node.otherValue;
contentSpec.getNode(leftIndex, node);
Element left = createContentModel(contentSpec, node,
factory, parent);
// go down right side
contentSpec.getNode(rightIndex, node);
Element right = createContentModel(contentSpec, node,
factory, null);
// append left children
String type = nodeType == XMLContentSpecNode.CONTENTSPECNODE_CHOICE
? "choice"
: "seq";
Element model = left;
if (!left.getAttribute("order").equals(type)) {
String minOccurs = left.getAttribute("minOccurs");
String maxOccurs = left.getAttribute("maxOccurs");
if (parent == null ||
((minOccurs.equals("1") || minOccurs.length() == 0) &&
(maxOccurs.equals("1") || maxOccurs.length() == 0))) {
model = factory.createElement("group");
model.setAttribute("order", type);
if (type.equals("seq")) {
((AttrImpl)model.getAttributeNode("order")).setSpecified(false);
}
model.appendChild(left);
}
else {
model = parent;
}
}
// set occurrence count
setOccurrenceCount(model, minOccur, maxOccur);
// append right children
model.appendChild(right);
// return model
return model;
}
// LEAF
case XMLContentSpecNode.CONTENTSPECNODE_LEAF: {
String name = fStringPool.toString(node.value);
Element leaf = factory.createElement("element");
leaf.setAttribute("ref", name);
// set occurrence count and return
setOccurrenceCount(leaf, minOccur, maxOccur);
return leaf;
}
} // switch node type
// error
return null;
} // createContentModel(XMLContentSpecNode,Element):Element
/**
* Sets the appropriate occurrence count attributes on the specified
* model element.
*/
private void setOccurrenceCount(Element model, int minOccur, int maxOccur) {
// min
model.setAttribute("minOccurs", Integer.toString(minOccur));
if (minOccur == 1) {
((AttrImpl)model.getAttributeNode("minOccurs")).setSpecified(false);
}
// max
if (maxOccur == -1) {
model.setAttribute("maxOccurs", "*");
}
else if (maxOccur != 1) {
model.setAttribute("maxOccurs", Integer.toString(maxOccur));
}
} // setOccurrenceCount(Element,int,int)
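    //
    // Illustrative note (not part of the original parser source): for a leaf
    // reference to an element named "foo", createContentModel() together with
    // setOccurrenceCount() above yields roughly:
    //
    //   foo?  ->  <element ref="foo" minOccurs="0"/>                (maxOccurs left at its default of 1)
    //   foo*  ->  <element ref="foo" minOccurs="0" maxOccurs="*"/>
    //   foo+  ->  <element ref="foo" maxOccurs="*"/>                (minOccurs="1" is set but marked unspecified)
    //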
/** Returns the first child element of the specified node. */
private int getFirstChildElement(int nodeIndex) {
int childIndex = fDeferredDocumentImpl.getFirstChild(nodeIndex, false);
while (childIndex != -1) {
if (fDeferredDocumentImpl.getNodeType(childIndex, false) == Node.ELEMENT_NODE) {
return childIndex;
}
childIndex = fDeferredDocumentImpl.getNextSibling(childIndex, false);
}
return -1;
}
/** Returns the next sibling element of the specified node. */
private int getNextSiblingElement(int nodeIndex) {
int siblingIndex = fDeferredDocumentImpl.getNextSibling(nodeIndex, false);
while (siblingIndex != -1) {
if (fDeferredDocumentImpl.getNodeType(siblingIndex, false) == Node.ELEMENT_NODE) {
return siblingIndex;
}
siblingIndex = fDeferredDocumentImpl.getNextSibling(siblingIndex, false);
}
return -1;
}
/** Returns the first child element with the given name. */
private int getFirstChildElement(int nodeIndex, String elementName) {
int childIndex = getFirstChildElement(nodeIndex);
if (childIndex != -1) {
while (childIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(childIndex, false);
if (nodeName.equals(elementName)) {
return childIndex;
}
childIndex = getNextSiblingElement(childIndex);
}
}
return -1;
}
/** Returns the next sibling element with the given name. */
private int getNextSiblingElement(int nodeIndex, String elementName) {
int siblingIndex = getNextSiblingElement(nodeIndex);
if (siblingIndex != -1) {
while (siblingIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(siblingIndex, false);
if (nodeName.equals(elementName)) {
return siblingIndex;
}
siblingIndex = getNextSiblingElement(siblingIndex);
}
}
return -1;
}
/** Returns the first child element with the given name. */
private int getFirstChildElement(int nodeIndex, String elemName, String attrName, String attrValue) {
int childIndex = getFirstChildElement(nodeIndex, elemName);
if (childIndex != -1) {
while (childIndex != -1) {
int attrIndex = fDeferredDocumentImpl.getNodeValue(childIndex, false);
while (attrIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(attrIndex, false);
if (nodeName.equals(attrName)) {
// REVISIT: Do we need to normalize the text? -Ac
int textIndex = fDeferredDocumentImpl.getFirstChild(attrIndex, false);
String nodeValue = fDeferredDocumentImpl.getNodeValueString(textIndex, false);
if (nodeValue.equals(attrValue)) {
return childIndex;
}
}
attrIndex = fDeferredDocumentImpl.getNextSibling(attrIndex, false);
}
childIndex = getNextSiblingElement(childIndex, elemName);
}
}
return -1;
}
/** Returns the next sibling element with the given name and attribute. */
private int getNextSiblingElement(int nodeIndex, String elemName, String attrName, String attrValue) {
int siblingIndex = getNextSiblingElement(nodeIndex, elemName);
if (siblingIndex != -1) {
            while (siblingIndex != -1) {
                int attrIndex = fDeferredDocumentImpl.getNodeValue(siblingIndex, false);
                while (attrIndex != -1) {
                    // match the attribute name as well as its value, mirroring
                    // getFirstChildElement(int,String,String,String) above
                    String nodeName = fDeferredDocumentImpl.getNodeNameString(attrIndex, false);
                    if (nodeName.equals(attrName)) {
                        int attrValueIndex = fDeferredDocumentImpl.getNodeValue(attrIndex, false);
                        if (attrValue.equals(fStringPool.toString(attrValueIndex))) {
                            return siblingIndex;
                        }
                    }
                    attrIndex = fDeferredDocumentImpl.getNextSibling(attrIndex, false);
                }
siblingIndex = getNextSiblingElement(siblingIndex, elemName);
}
}
return -1;
}
/**
* Copies the source tree into the specified place in a destination
* tree. The source node and its children are appended as children
* of the destination node.
* <p>
* <em>Note:</em> This is an iterative implementation.
*/
private void copyInto(Node src, int destIndex) throws Exception {
// for ignorable whitespace features
boolean domimpl = src != null && src instanceof DocumentImpl;
// placement variables
Node start = src;
Node parent = src;
Node place = src;
// traverse source tree
while (place != null) {
// copy this node
int nodeIndex = -1;
short type = place.getNodeType();
switch (type) {
case Node.CDATA_SECTION_NODE: {
boolean ignorable = domimpl && ((TextImpl)place).isIgnorableWhitespace();
nodeIndex = fDeferredDocumentImpl.createCDATASection(fStringPool.addString(place.getNodeValue()), ignorable);
break;
}
case Node.COMMENT_NODE: {
nodeIndex = fDeferredDocumentImpl.createComment(fStringPool.addString(place.getNodeValue()));
break;
}
case Node.ELEMENT_NODE: {
XMLAttrList attrList = null;
int handle = -1;
NamedNodeMap attrs = place.getAttributes();
if (attrs != null) {
int length = attrs.getLength();
if (length > 0) {
handle = fAttrList.startAttrList();
for (int i = 0; i < length; i++) {
Attr attr = (Attr)attrs.item(i);
String attrName = attr.getNodeName();
String attrValue = attr.getNodeValue();
fAttrList.addAttr(
fStringPool.addSymbol(attrName),
fStringPool.addString(attrValue),
fStringPool.addSymbol("CDATA"), // REVISIT
attr.getSpecified(),
false); // search
}
fAttrList.endAttrList();
attrList = fAttrList;
}
}
nodeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol(place.getNodeName()), attrList, handle);
break;
}
case Node.ENTITY_REFERENCE_NODE: {
nodeIndex = fDeferredDocumentImpl.createEntityReference(fStringPool.addSymbol(place.getNodeName()));
break;
}
case Node.PROCESSING_INSTRUCTION_NODE: {
nodeIndex = fDeferredDocumentImpl.createProcessingInstruction(fStringPool.addSymbol(place.getNodeName()), fStringPool.addString(place.getNodeValue()));
break;
}
case Node.TEXT_NODE: {
boolean ignorable = domimpl && ((TextImpl)place).isIgnorableWhitespace();
nodeIndex = fDeferredDocumentImpl.createTextNode(fStringPool.addString(place.getNodeValue()), ignorable);
break;
}
default: {
throw new IllegalArgumentException("PAR010 Can't copy node type, "+
type+" ("+
place.getNodeName()+')'
+"\n"+type+"\t"+place.getNodeName());
}
}
fDeferredDocumentImpl.appendChild(destIndex, nodeIndex);
// iterate over children
if (place.hasChildNodes()) {
parent = place;
place = place.getFirstChild();
destIndex = nodeIndex;
}
// advance
else {
place = place.getNextSibling();
while (place == null && parent != start) {
place = parent.getNextSibling();
parent = parent.getParentNode();
destIndex = fDeferredDocumentImpl.getParentNode(destIndex, false);
}
}
}
} // copyInto(Node,int)
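    //
    // Traversal sketch (illustrative, not in the original source): copying a
    // hypothetical fragment <a><b/><c>text</c></a> visits a, b, c, text in
    // document order; the loop descends with getFirstChild() and, once a subtree
    // is exhausted, climbs back up through the parent/getNextSibling() loop
    // until it returns to the start node.
    //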
/** Creates the content model elements for the deferred DOM tree. */
private int createDeferredContentModel(Node model) throws Exception {
int nodeType = model.getNodeType();
switch (nodeType) {
case Node.ELEMENT_NODE: {
NamedNodeMap attrs = model.getAttributes();
int handle = fAttrList.startAttrList();
int length = attrs.getLength();
for (int i = 0; i < length; i++) {
Attr attr = (Attr)attrs.item(i);
String attrName = attr.getNodeName();
String attrValue = attr.getNodeValue();
fAttrList.addAttr(
fStringPool.addSymbol(attrName),
fStringPool.addString(attrValue),
fStringPool.addSymbol((String)TYPES.get(attrName)),
attr.getSpecified(),
false); // search
}
fAttrList.endAttrList();
int modelIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol(model.getNodeName()), fAttrList, handle);
Node child = model.getFirstChild();
while (child != null) {
int childIndex = createDeferredContentModel(child);
fDeferredDocumentImpl.appendChild(modelIndex, childIndex);
child = child.getNextSibling();
}
return modelIndex;
}
case Node.TEXT_NODE: {
return fDeferredDocumentImpl.createTextNode(fStringPool.addString(model.getNodeValue()), false);
}
}
return -1;
} // createDeferredContentModel(Node):int
} // class DOMParser
|
The internalSubset parameter of createDocumentType was dropped
during the DOM Level 2 CR; the code has been updated accordingly.
git-svn-id: 21df804813e9d3638e43477f308dd0be51e5f30f@315328 13f79535-47bb-0310-9956-ffa450edef68
|
src/org/apache/xerces/parsers/DOMParser.java
|
The internalSubset parameter of createDocumentType was dropped during the DOM Level 2 CR; the code has been updated accordingly.
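For illustration only (not part of the commit): in DOM Level 2, DOMImplementation.createDocumentType takes only the qualified name and the external IDs, and the internal subset is exposed read-only through DocumentType.getInternalSubset(). A minimal caller sketch of the three-argument form, with hypothetical names:
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.DocumentType;
class DoctypeSketch {
    // builds a doctype node without passing an internal subset string
    static DocumentType makeDoctype(DOMImplementation impl,
                                    String rootName, String publicId, String systemId) {
        return impl.createDocumentType(rootName, publicId, systemId);
    }
}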
|
|
Java
|
apache-2.0
|
5b7876936515552f8262e01f28d2be2dcfa98850
| 0
|
mgillian/uPortal,andrewstuart/uPortal,jonathanmtran/uPortal,ASU-Capstone/uPortal-Forked,andrewstuart/uPortal,EsupPortail/esup-uportal,mgillian/uPortal,pspaude/uPortal,drewwills/uPortal,ASU-Capstone/uPortal-Forked,phillips1021/uPortal,bjagg/uPortal,stalele/uPortal,phillips1021/uPortal,drewwills/uPortal,jl1955/uPortal5,GIP-RECIA/esup-uportal,jhelmer-unicon/uPortal,phillips1021/uPortal,MichaelVose2/uPortal,kole9273/uPortal,Mines-Albi/esup-uportal,ASU-Capstone/uPortal-Forked,EdiaEducationTechnology/uPortal,Jasig/SSP-Platform,EdiaEducationTechnology/uPortal,andrewstuart/uPortal,ASU-Capstone/uPortal-Forked,vbonamy/esup-uportal,andrewstuart/uPortal,GIP-RECIA/esco-portail,Jasig/SSP-Platform,chasegawa/uPortal,vertein/uPortal,bjagg/uPortal,MichaelVose2/uPortal,GIP-RECIA/esup-uportal,joansmith/uPortal,vbonamy/esup-uportal,joansmith/uPortal,ASU-Capstone/uPortal,groybal/uPortal,stalele/uPortal,joansmith/uPortal,drewwills/uPortal,bjagg/uPortal,chasegawa/uPortal,EsupPortail/esup-uportal,vbonamy/esup-uportal,doodelicious/uPortal,ASU-Capstone/uPortal-Forked,apetro/uPortal,jonathanmtran/uPortal,EsupPortail/esup-uportal,jameswennmacher/uPortal,EsupPortail/esup-uportal,vertein/uPortal,jhelmer-unicon/uPortal,drewwills/uPortal,Mines-Albi/esup-uportal,doodelicious/uPortal,GIP-RECIA/esup-uportal,GIP-RECIA/esup-uportal,mgillian/uPortal,kole9273/uPortal,apetro/uPortal,Mines-Albi/esup-uportal,kole9273/uPortal,vbonamy/esup-uportal,jl1955/uPortal5,ASU-Capstone/uPortal,jhelmer-unicon/uPortal,Jasig/SSP-Platform,phillips1021/uPortal,Jasig/uPortal-start,stalele/uPortal,groybal/uPortal,jl1955/uPortal5,apetro/uPortal,jameswennmacher/uPortal,groybal/uPortal,Jasig/SSP-Platform,MichaelVose2/uPortal,GIP-RECIA/esco-portail,EsupPortail/esup-uportal,kole9273/uPortal,ASU-Capstone/uPortal,apetro/uPortal,jl1955/uPortal5,Jasig/SSP-Platform,cousquer/uPortal,joansmith/uPortal,chasegawa/uPortal,joansmith/uPortal,GIP-RECIA/esco-portail,groybal/uPortal,EdiaEducationTechnology/uPortal,Jasig/uPortal,timlevett/uPortal,jameswennmacher/uPortal,Jasig/uPortal,vertein/uPortal,pspaude/uPortal,ChristianMurphy/uPortal,cousquer/uPortal,ChristianMurphy/uPortal,jhelmer-unicon/uPortal,timlevett/uPortal,pspaude/uPortal,chasegawa/uPortal,pspaude/uPortal,doodelicious/uPortal,ChristianMurphy/uPortal,vertein/uPortal,Mines-Albi/esup-uportal,jonathanmtran/uPortal,Jasig/uPortal,EdiaEducationTechnology/uPortal,ASU-Capstone/uPortal,jhelmer-unicon/uPortal,timlevett/uPortal,timlevett/uPortal,andrewstuart/uPortal,doodelicious/uPortal,GIP-RECIA/esup-uportal,MichaelVose2/uPortal,Mines-Albi/esup-uportal,jameswennmacher/uPortal,Jasig/uPortal-start,cousquer/uPortal,doodelicious/uPortal,phillips1021/uPortal,stalele/uPortal,groybal/uPortal,jl1955/uPortal5,ASU-Capstone/uPortal,jameswennmacher/uPortal,MichaelVose2/uPortal,apetro/uPortal,vbonamy/esup-uportal,kole9273/uPortal,chasegawa/uPortal,stalele/uPortal
|
/* Copyright 2001-2004 The JA-SIG Collaborative. All rights reserved.
* See license distributed with this file and
* available online at http://www.uportal.org/license.html
*/
package org.jasig.portal;
import java.util.Date;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* Base portal exception class.
* Information contained in this class allows ErrorChannel
* to handle errors gracefully.
* This class also reports itself to the ProblemsTable whenever it is instantiated.
* The Problems servlet displays recently reported PortalExceptions.
* @author Peter Kharchenko
* @version $Revision$
*/
public class PortalException extends Exception {
private final Log log = LogFactory.getLog(PortalException.class);
/**
* should the user be given an option to reinstantiate
* the channel in a given session
*/
boolean reinstantiable = true;
/**
* should the user be given an option to retry rendering
* that same channel instance
*/
boolean refreshable = true;
/**
     * True if logging is pending on this exception instance
     * (it has not yet been logged but potentially will be); false
     * if all the logging that ought to happen has already happened.
*/
boolean logPending = true;
/**
* ErrorID categorizing this PortalException instance.
*/
ErrorID errorID = Errors.legacy;
/**
* Parameter to the ErrorID's template message.
*/
String parameter = null;
/**
* The time at which this PortalException instance was instantiated.
*/
Date timestamp = new Date();
/**
* Instantiate a generic PortalException.
* Instantiating a bare, no-message, no ErrorID, no frills
* PortalException is pretty anti-social. Wouldn't you rather
* use a constructor that provides more information?
*/
public PortalException() {
ProblemsTable.store(this);
}
/**
* Construct a new portal exception, recording an
* underlying cause.
*
* @param cause a <code>Throwable</code> causing this exception
*/
public PortalException(Throwable cause) {
super(cause);
ProblemsTable.store(this);
}
/**
* Creates a new <code>PortalException</code> instance,
* with a contained text message.
*
* @param msg describes exceptional condition
*/
public PortalException(String msg) {
super(msg);
ProblemsTable.store(this);
}
/**
* Instantiate a PortalException representing an instance of the
* type of error represented by the given ErrorID.
* @param errorid - type of error
*/
public PortalException(ErrorID errorid) {
super(errorid.getMessage());
this.errorID=errorid;
ProblemsTable.store(this);
}
/**
* Instantiate a PortalException with the given message and underlying cause.
* @param msg - message describing the error
* @param cause - underlying cause of the error
*/
public PortalException(String msg, Throwable cause) {
super(msg, cause);
ProblemsTable.store(this);
}
/**
* Instantiate a PortalException representing an instance of the type of error
* represented by the given ErrorID, with the given underlying cause.
* @param errorid - type of error
* @param cause - underlying cause of error.
*/
public PortalException(ErrorID errorid, Throwable cause) {
super(errorid.getMessage(), cause);
this.errorID=errorid;
ProblemsTable.store(this);
}
/**
     * Check if user-mediated refresh is allowed.
* @return true if refresh allowed, false otherwise.
*/
public boolean isRefreshable() {
return this.refreshable;
}
/**
* Legacy support for old name of property accessor.
* @return isRefreshable()
* @deprecated use isRefreshable().
*/
public boolean allowRefresh() {
return isRefreshable();
}
/**
* Check if user-mediated reinstantiation is allowed.
* @return true if reinstantiation allowed, false otherwise
*/
public boolean isReinstantiable() {
return this.reinstantiable;
}
/**
     * Legacy support for old name of property accessor.
     * @return isReinstantiable()
* @deprecated use isReinstantiable()
*/
public boolean allowReinstantiation() {
return isReinstantiable();
}
/**
* Retrieve an optionally recorded exception that
* caused the error.
* @return the cause if it is an Exception
* @deprecated - use Throwable.getCause()
*/
public Exception getRecordedException() {
Throwable cause = this.getCause();
if (cause != null && cause instanceof Exception)
return (Exception) cause;
return null;
}
/**
* Set if the user should be presented with an option
* to retry the same operation on the component that
* has generated the error.
*
* @param refresh a <code>boolean</code> value
*/
public void setRefreshable(boolean refresh) {
this.refreshable=refresh;
}
/**
* Set if the user should be presented with an option
* to reinstantiate the component (channel) that generated
* the error.
*
* @param reinstantiate a <code>boolean</code> value
*/
public void setReinstantiable(boolean reinstantiate) {
this.reinstantiable=reinstantiate;
}
/**
     * Records the exception that caused the error.
* The exception information can later be used in error
* reporting and user interaction.
*
* @param exc an <code>Exception</code> value
* @deprecated use initCause() instead.
*/
public void setRecordedException(Exception exc) {
try {
this.initCause(exc);
} catch (Throwable t) {
// legacy implementation was setting a simple JavaBean property
// which could never throw an exception.
// we emulate that exceptionless behavior here.
if (log.isWarnEnabled())
log.warn("Exception setting the recorded exception of [" + this + "] " +
"to [" + exc + "]", t);
}
}
/**
* Determine whether logging is pending on this PortalException.
* @return <code>true</code> if the log is pending, otherwise <code>false</code>
*/
public boolean isLogPending() {
return this.logPending;
}
/**
* Set whether logging is pending on this PortalException.
* @param b true if logging is pending
*/
public void setLogPending(boolean b) {
this.logPending = b;
}
/**
* Get the ErrorID representing the type of this error.
* @return the error ID
*/
public ErrorID getErrorID() {
return this.errorID;
}
/**
* Set the ErrorID categorizing this PortalException.
* @param errorID the ErrorID categorizing this PortalException.
*/
public void setErrorID(ErrorID errorID) {
this.errorID = errorID;
}
/**
* Get the parameter to the ErrorID template message.
* @return the parameter
*/
public String getParameter() {
return this.parameter;
}
/**
* Set the parameter to the ErrorID template message.
* @param string - parameter to ErrorID template message.
*/
public void setParameter(String string) {
this.parameter = string;
}
/**
* Instantiate a PortalException with the given message and refresh,
* reinstantiate state.
* @param msg - message describing the problem
* @param refresh - whether refresh is appropriate response
* @param reinstantiate - whether reinstantiate is appropriate response
*/
public PortalException(String msg, boolean refresh, boolean reinstantiate) {
super(msg);
this.setReinstantiable(reinstantiate);
this.setRefreshable(refresh);
ProblemsTable.store(this);
}
/**
* Instantiate a PortalException with the given message, underlying cause,
* refresh, and reinstantiate state.
* @param msg - message describing the problem
* @param cause - underlying cause of problem
* @param refresh - true if refresh is an appropriate response
* @param reinstantiate - true if reinstantiate is an appropriate response
*/
public PortalException(String msg, Throwable cause,
boolean refresh, boolean reinstantiate) {
super(msg, cause);
this.setReinstantiable(reinstantiate);
this.setRefreshable(refresh);
ProblemsTable.store(this);
}
/**
* Get the Date at which this PortalException instance was instantiated.
* @return Returns the timestamp.
*/
public Date getTimestamp() {
return this.timestamp;
}
}
|
source/org/jasig/portal/PortalException.java
|
/**
* Copyright 2001-2004 The JA-SIG Collaborative. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the JA-SIG Collaborative
* (http://www.jasig.org/)."
*
* THIS SOFTWARE IS PROVIDED BY THE JA-SIG COLLABORATIVE "AS IS" AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE JA-SIG COLLABORATIVE OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
package org.jasig.portal;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.Date;
/**
* Base portal exception class.
* Information contained in this class allows ErrorChannel
* to handle errors gracefully.
* This class also reports itself to the ProblemsTable whenever it is instantiated.
* The Problems servlet displays recently reported PortalExceptions.
* @author Peter Kharchenko
* @version $Revision$
*/
public class PortalException extends Exception {
// should the user be given an option to reinstantiate
// the channel in a given session ?
boolean reinstantiable=true;
// should the user be given an option to retry rendering
// that same channel instance ?
boolean refreshable=true;
boolean logPending = true;
ErrorID errorID = Errors.legacy;
String parameter = null;
Date timestamp = new Date();
// Precursor to Throwable.cause property
// for Java < 1.4
Exception recordedException;
public PortalException() {
ProblemsTable.store(this);
}
/**
* Construct a new portal exception, recording the
* exception that originally caused the error.
*
* @param exc an <code>Exception</code> value
*/
public PortalException(Exception exc) {
this.recordedException=exc;
ProblemsTable.store(this);
}
/**
* Creates a new <code>PortalException</code> instance,
* with a contained text message.
*
* @param msg a <code>String</code> value
*/
public PortalException(String msg) {
super(msg);
ProblemsTable.store(this);
}
public PortalException(ErrorID errorid) {
super(errorid.getMessage());
this.errorID=errorid;
ProblemsTable.store(this);
}
public PortalException(String msg,Exception exc) {
super(msg);
this.recordedException=exc;
ProblemsTable.store(this);
}
public PortalException(ErrorID errorid, Exception exc) {
super(errorid.getMessage());
this.errorID=errorid;
this.recordedException=exc;
ProblemsTable.store(this);
}
/**
     * Check if user-mediated refresh is allowed.
*/
public boolean isRefreshable() {
return refreshable;
}
/**
* Legacy support for badly named property accessor
*
* @return isRefreshable()
*/
public boolean allowRefresh() {
return isRefreshable();
}
/**
* Check if user-mediated reinstantiation is allowed.
*
* @return a <code>boolean</code> value
*/
public boolean isReinstantiable() {
return reinstantiable;
}
/**
* Legacy support for badly named property accessor
     * @return isReinstantiable()
*/
public boolean allowReinstantiation() {
return isReinstantiable();
}
/**
* Retrieve an optionally recorded exception that
* caused the error.
*
* @return an <code>Exception</code> value
*/
public Exception getRecordedException() {
return this.recordedException;
}
/**
* Set if the user should be presented with an option
* to retry the same operation on the component that
* has generated the error.
*
* @param refresh a <code>boolean</code> value
*/
public void setRefreshable(boolean refresh) {
this.refreshable=refresh;
}
/**
* Set if the user should be presented with an option
* to reinstantiate the component (channel) that generated
* the error.
*
* @param reinstantiate a <code>boolean</code> value
*/
public void setReinstantiable(boolean reinstantiate) {
this.reinstantiable=reinstantiate;
}
/**
* Allows to record the exception that caused the error.
* The exception information can later be used in error
* reporting and user interaction.
*
* @param exc an <code>Exception</code> value
*/
public void setRecordedException(Exception exc) {
this.recordedException=exc;
}
/**
* @return <code>true</code> if the log is pending, otherwise <code>false</code>
*/
public boolean isLogPending() {
return logPending;
}
/**
* @param b
*/
public void setLogPending(boolean b) {
logPending = b;
}
/**
* @return the error ID
*/
public ErrorID getErrorID() {
return errorID;
}
/**
* @param errorID
*/
public void setErrorID(ErrorID errorID) {
this.errorID = errorID;
}
/**
* @return the parameter
*/
public String getParameter() {
return parameter;
}
/**
* @param string
*/
public void setParameter(String string) {
parameter = string;
}
public PortalException(String msg, boolean refresh, boolean reinstantiate) {
super(msg);
this.setReinstantiable(reinstantiate);
this.setRefreshable(refresh);
ProblemsTable.store(this);
}
public PortalException(String msg, Exception exc, boolean refresh, boolean reinstantiate) {
this(msg,refresh,reinstantiate);
this.setRecordedException(exc);
ProblemsTable.store(this);
}
/**
* Override <code>Exception</code> getMessage() method to
* append the recorded exception message, if applicable
*
* @return the message
*/
public String getMessage(){
StringBuffer sb = new StringBuffer(String.valueOf(super.getMessage()));
Exception ex = getRecordedException();
if (ex != null) {
String lmsg = ex.getMessage();
if (lmsg!=null) {
sb.append("\n [based on exception: ");
sb.append(lmsg);
sb.append("]");
}
}
return sb.toString();
}
/**
* Overrides <code>Exception</code> printStackTrace() method
*/
public void printStackTrace(){
this.printStackTrace(System.out);
}
/**
* Overrides <code>Exception</code> printStackTrace(PrintWriter writer)
* method to print recorded exception stack trace if applicable
*/
public void printStackTrace(PrintWriter writer){
if (getRecordedException()!=null){
getRecordedException().printStackTrace(writer);
}
else{
super.printStackTrace(writer);
}
}
/**
* Overrides <code>Exception</code> printStackTrace(PrintStream stream) method
*/
public void printStackTrace(PrintStream stream){
this.printStackTrace(new PrintWriter(stream,true));
}
/**
* @return Returns the timestamp.
*/
public Date getTimestamp() {
return timestamp;
}
}
|
Refactored PortalException to support JDK 1.4 Throwable chaining.
Changed the copyright notice to the new, shorter format.
git-svn-id: 477788cc2a8229a747c5b8073e47c1d0f6ec0604@9397 f5dbab47-78f9-eb45-b975-e544023573eb
|
source/org/jasig/portal/PortalException.java
|
Refactored PortalException to support JDK 1.4 Throwable chaining. Changed the copyright notice to the new, shorter format.
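A minimal usage sketch (not part of the commit; the channel-loading helper and message text are hypothetical) of chaining an underlying cause through the refactored constructors shown above:
import org.jasig.portal.PortalException;
class PortalExceptionSketch {
    void render() throws PortalException {
        try {
            loadChannel(); // hypothetical helper that may fail
        } catch (Exception e) {
            // the cause rides along via JDK 1.4 Throwable chaining;
            // refresh and reinstantiation are both allowed as responses
            throw new PortalException("Channel failed to render", e, true, true);
        }
    }
    void loadChannel() throws Exception {
        // placeholder for real channel-loading work
    }
}
Callers that used to call the deprecated getRecordedException() can now read the cause back with the standard Throwable.getCause().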
|
|
Java
|
apache-2.0
|
0e8aea2ca071c690748f2879692e78a53c1a26b4
| 0
|
gnb/voldemort,bhasudha/voldemort,arunthirupathi/voldemort,voldemort/voldemort,null-exception/voldemort,jeffpc/voldemort,stotch/voldemort,bhasudha/voldemort,PratikDeshpande/voldemort,FelixGV/voldemort,HB-SI/voldemort,HB-SI/voldemort,stotch/voldemort,PratikDeshpande/voldemort,stotch/voldemort,FelixGV/voldemort,jwlent55/voldemort,medallia/voldemort,voldemort/voldemort,arunthirupathi/voldemort,medallia/voldemort,FelixGV/voldemort,voldemort/voldemort,bitti/voldemort,HB-SI/voldemort,null-exception/voldemort,gnb/voldemort,jalkjaer/voldemort,LeoYao/voldemort,medallia/voldemort,birendraa/voldemort,squarY/voldemort,arunthirupathi/voldemort,arunthirupathi/voldemort,null-exception/voldemort,jalkjaer/voldemort,bitti/voldemort,FelixGV/voldemort,LeoYao/voldemort,jeffpc/voldemort,PratikDeshpande/voldemort,medallia/voldemort,squarY/voldemort,bhasudha/voldemort,LeoYao/voldemort,FelixGV/voldemort,jalkjaer/voldemort,jwlent55/voldemort,jeffpc/voldemort,LeoYao/voldemort,rickbw/voldemort,dallasmarlow/voldemort,cshaxu/voldemort,jalkjaer/voldemort,HB-SI/voldemort,bitti/voldemort,dallasmarlow/voldemort,bitti/voldemort,rickbw/voldemort,dallasmarlow/voldemort,squarY/voldemort,PratikDeshpande/voldemort,rickbw/voldemort,cshaxu/voldemort,stotch/voldemort,FelixGV/voldemort,bitti/voldemort,mabh/voldemort,jwlent55/voldemort,squarY/voldemort,squarY/voldemort,jeffpc/voldemort,mabh/voldemort,LeoYao/voldemort,gnb/voldemort,arunthirupathi/voldemort,jalkjaer/voldemort,mabh/voldemort,mabh/voldemort,bitti/voldemort,bitti/voldemort,PratikDeshpande/voldemort,FelixGV/voldemort,mabh/voldemort,birendraa/voldemort,medallia/voldemort,dallasmarlow/voldemort,cshaxu/voldemort,birendraa/voldemort,cshaxu/voldemort,jeffpc/voldemort,bhasudha/voldemort,dallasmarlow/voldemort,jalkjaer/voldemort,rickbw/voldemort,cshaxu/voldemort,rickbw/voldemort,stotch/voldemort,voldemort/voldemort,HB-SI/voldemort,LeoYao/voldemort,voldemort/voldemort,birendraa/voldemort,cshaxu/voldemort,null-exception/voldemort,jalkjaer/voldemort,null-exception/voldemort,squarY/voldemort,jwlent55/voldemort,jeffpc/voldemort,gnb/voldemort,arunthirupathi/voldemort,rickbw/voldemort,mabh/voldemort,birendraa/voldemort,null-exception/voldemort,jwlent55/voldemort,gnb/voldemort,voldemort/voldemort,dallasmarlow/voldemort,medallia/voldemort,squarY/voldemort,jwlent55/voldemort,arunthirupathi/voldemort,stotch/voldemort,gnb/voldemort,bhasudha/voldemort,bhasudha/voldemort,birendraa/voldemort,HB-SI/voldemort,voldemort/voldemort,PratikDeshpande/voldemort
|
/*
* Copyright 2008-2009 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.versioning;
import java.io.Serializable;
import java.util.Comparator;
import voldemort.utils.Utils;
import com.google.common.base.Objects;
/**
* A wrapper for an object that adds a Version.
*
* @author jay
*
*/
public final class Versioned<T> implements Serializable {
private static final long serialVersionUID = 1;
private VectorClock version;
private volatile T object;
public Versioned(T object) {
this(object, new VectorClock());
}
public Versioned(T object, Version version) {
this.version = version == null ? new VectorClock() : (VectorClock) version;
this.object = object;
}
public Version getVersion() {
return version;
}
public T getValue() {
return object;
}
public void setObject(T object) {
this.object = object;
}
@Override
public boolean equals(Object o) {
if(o == this)
return true;
else if(!(o instanceof Versioned<?>))
return false;
Versioned<?> versioned = (Versioned<?>) o;
return Objects.equal(getVersion(), versioned.getVersion())
&& Utils.deepEquals(getValue(), versioned.getValue());
}
@Override
public int hashCode() {
return 31 + version.hashCode() + 31 * object.hashCode();
}
@Override
public String toString() {
return "[" + object + ", " + version + "]";
}
/**
* Create a clone of this Versioned object such that the object pointed to
* is the same, but the VectorClock and Versioned wrapper is a shallow copy.
*/
public Versioned<T> cloneVersioned() {
return new Versioned<T>(this.getValue(), this.version.clone());
}
public static <S> Versioned<S> value(S s) {
return new Versioned<S>(s, new VectorClock());
}
public static <S> Versioned<S> value(S s, Version v) {
return new Versioned<S>(s, v);
}
public static final class HappenedBeforeComparator<S> implements Comparator<Versioned<S>> {
public int compare(Versioned<S> v1, Versioned<S> v2) {
Occured occured = v1.getVersion().compare(v2.getVersion());
if(occured == Occured.BEFORE)
return -1;
else if(occured == Occured.AFTER)
return 1;
else
return 0;
}
}
}
|
src/java/voldemort/versioning/Versioned.java
|
/*
* Copyright 2008-2009 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package voldemort.versioning;
import java.io.Serializable;
import java.util.Comparator;
import voldemort.utils.Utils;
import com.google.common.base.Objects;
/**
* A wrapper for an object that adds a Version.
*
* @author jay
*
*/
public final class Versioned<T> implements Serializable {
private static final long serialVersionUID = 1;
private VectorClock version;
private volatile T object;
public Versioned(T object) {
this(object, new VectorClock());
}
public Versioned(T object, Version version) {
this.version = version == null ? new VectorClock() : (VectorClock) version;
this.object = object;
}
public Version getVersion() {
return version;
}
public T getValue() {
return object;
}
public void setObject(T object) {
this.object = object;
}
@Override
public boolean equals(Object o) {
if(o == this)
return true;
else if(!(o instanceof Versioned))
return false;
Versioned<?> versioned = (Versioned<?>) o;
return Objects.equal(getVersion(), versioned.getVersion())
&& Utils.deepEquals(getValue(), versioned.getValue());
}
@Override
public int hashCode() {
return 31 + version.hashCode() + 31 * object.hashCode();
}
@Override
public String toString() {
return "[" + object + ", " + version + "]";
}
/**
* Create a clone of this Versioned object such that the object pointed to
* is the same, but the VectorClock and Versioned wrapper is a shallow copy.
*/
public Versioned<T> cloneVersioned() {
return new Versioned<T>(this.getValue(), this.version.clone());
}
public static <S> Versioned<S> value(S s) {
return new Versioned<S>(s, new VectorClock());
}
public static <S> Versioned<S> value(S s, Version v) {
return new Versioned<S>(s, v);
}
public static final class HappenedBeforeComparator<S> implements Comparator<Versioned<S>> {
public int compare(Versioned<S> v1, Versioned<S> v2) {
Occured occured = v1.getVersion().compare(v2.getVersion());
if(occured == Occured.BEFORE)
return -1;
else if(occured == Occured.AFTER)
return 1;
else
return 0;
}
}
}
|
Fix raw type compiler warning in instanceof check.
|
src/java/voldemort/versioning/Versioned.java
|
Fix raw type compiler warning in instanceof check.
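The change is the single instanceof test in equals(); shown side by side here, copied from the two file versions above:
// before: raw type, which the compiler flags with a raw-type warning
else if(!(o instanceof Versioned))
    return false;
// after: wildcard parameterization, same runtime behavior, no warning
else if(!(o instanceof Versioned<?>))
    return false;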
|
|
Java
|
bsd-3-clause
|
a1747fc64b45457e7282c4a9539939bfa8559ba4
| 0
|
OEP/rainwave-android
|
package cc.rainwave.android.api;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
import cc.rainwave.android.R;
import cc.rainwave.android.Rainwave;
import cc.rainwave.android.api.types.Album;
import cc.rainwave.android.api.types.Artist;
import cc.rainwave.android.api.types.Event;
import cc.rainwave.android.api.types.RainwaveException;
import cc.rainwave.android.api.types.Song;
import cc.rainwave.android.api.types.SongRating;
import cc.rainwave.android.api.types.Station;
import cc.rainwave.android.api.types.User;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
import com.google.gson.stream.JsonReader;
public class Session {
private static final String TAG = "Session";
private Context mContext;
private int mStation = 1;
private String mUserId;
private String mKey;
private URL mBaseUrl;
private Bitmap mCurrentAlbumArt;
private Event mCurrentEvent;
private Event[] mNextEvents;
private Event[] mEventHistory;
private Song[] mRequests;
private User mUser;
private Station[] mStations;
private Artist[] mArtists;
private Album[] mAlbums;
private int mLastVoteId = -1;
/** Estimate of time delta in seconds between server time and local time. */
private long mDrift = 0;
/** Can't instantiate directly */
private Session() { }
public void info() throws RainwaveException {
final String path = "info";
try {
final JsonElement element = get(path);
updateSchedules(element);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
public void sync() throws RainwaveException {
final String path = "sync";
// include a last known event id so the server can rush information to
// us if we're behind
String[] args = null;
if(getCurrentEvent() != null) {
args = new String[] {"known_event_id", String.valueOf(getCurrentEvent().getId())};
}
else {
args = new String[0];
}
try {
final JsonElement element = post(path, args);
updateSchedules(element);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
/** Update schedules if we are still listening to their updates. */
private void updateSchedules(final JsonElement root) {
Event newCurrent = getIfExists(root, "sched_current", Event.class, mCurrentEvent);
// It's possible that someone could have changed the station and this is
        // an update for a previous station that we don't need any more.
//
// FIXME: This is pretty bad practice -- is there another way?
if(newCurrent.getStationId() != getStationId()) {
return;
}
mCurrentEvent = newCurrent;
mNextEvents = getIfExists(root, "sched_next", Event[].class, mNextEvents);
mEventHistory = getIfExists(root, "sched_history", Event[].class, mEventHistory);
mUser = getIfExists(root, "user", User.class, mUser);
mRequests = getIfExists(root, "requests", Song[].class, mRequests);
// This does some checking to see if the "last_vote" reported by the api actually belongs
// to the current election. If it does, it accepts the ID, otherwise it is set to -1.
if(JsonHelper.hasMember(root, "vote_result")) {
final JsonElement child = JsonHelper.getChild(root, "vote_result");
final int elecId = JsonHelper.getInt(child, "elec_id");
            if(elecId == mCurrentEvent.getId()) {
                mLastVoteId = JsonHelper.getInt(child, "entry_id");
}
else {
mLastVoteId = -1;
}
}
}
private static <T> T getIfExists(final JsonElement root, final String name, Class<T> classOfT, T defaultValue) {
if(JsonHelper.hasMember(root, name)) {
final Gson gson = getGson();
return gson.fromJson(JsonHelper.getChild(root, name), classOfT);
}
return defaultValue;
}
public SongRating rateSong(int songId, float rating)
throws RainwaveException {
final String path = "rate";
final String returns = "rate_result";
rating = Math.max(1.0f, Math.min(rating, 5.0f));
return requestObject(Method.POST, path, returns, true, SongRating.class,
"song_id", String.valueOf(songId),
"rating", String.valueOf(rating)
);
}
public void vote(int elecId)
throws RainwaveException {
final String path = "vote";
final String returns = "vote_result";
try {
final JsonElement element = post(path,
"entry_id", String.valueOf(elecId)
);
checkError(JsonHelper.getChild(element, returns));
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
// checkError() would have thrown if there was an error
mLastVoteId = elecId;
}
public Station[] fetchStations() throws RainwaveException {
final String path = "stations";
final String returns = "stations";
if(hasStations()) {
return cloneStations();
}
mStations = requestObject(Method.POST, path, returns, false, Station[].class);
return cloneStations();
}
/**
* Fetch a list of all the albums. Returns a cached version if available.
* @return array of Albums
* @throws RainwaveException in case of a problem understanding the response
*/
public Album[] fetchAlbums() throws RainwaveException {
final String path = "all_albums";
final String returns = "all_albums";
mAlbums = requestObject(Method.POST, path, returns, false, Album[].class);
return mAlbums;
}
/**
* Get album data.
*
* @return all albums if cached, or null otherwise
*/
public Album[] getAlbums() {
return mAlbums;
}
/**
* Fetch a list of all artists.
*
* @return a list of all artists
* @throws RainwaveException in case of problem understanding the response
*/
public Artist[] fetchArtists() throws RainwaveException {
final String path = "all_artists";
final String returns = "all_artists";
mArtists = requestObject(Method.POST, path, returns, false, Artist[].class);
return mArtists;
}
/**
* Get artist data.
*
* @return all artist data if cached, or null otherwise
*/
public Artist[] getArtists() {
return mArtists;
}
public Artist fetchDetailedArtist(int artist_id) throws RainwaveException {
final String path = "artist";
final String returns = "artist";
return requestObject(Method.POST, path, returns, false, Artist.class,
"id", String.valueOf(artist_id)
);
}
public Album fetchDetailedAlbum(int album_id) throws RainwaveException {
final String path = "album";
final String returns = "album";
return requestObject(Method.POST, path, returns, false, Album.class,
"id", String.valueOf(album_id)
);
}
public Song[] submitRequest(int song_id) throws RainwaveException {
final String path = "request";
final JsonElement root = post(path, "song_id", String.valueOf(song_id));
try {
final Gson gson = getGson();
checkError(JsonHelper.getChild(root, "request_result"));
return gson.fromJson(JsonHelper.getChild(root, "requests"), Song[].class);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
/**
* Fetch a full resolution album art.
*
* @param path base url to album art
* @return bitmap of album art
* @throws IOException
*/
public Bitmap fetchAlbumArt(String path) throws IOException {
return fetchAlbumArtHelper(path + ".jpg");
}
/**
* Fetch a minimum width album art. The returned bitmap is guaranteed to
* be at least the requested width.
*
* @param path base url to album art
* @param width minimum width required
* @return bitmap of album art
* @throws IOException
*/
public Bitmap fetchAlbumArt(String path, int width) throws IOException {
if(width <= 120) {
return fetchAlbumArtHelper(path + "_120.jpg");
}
else if(width <= 240) {
return fetchAlbumArtHelper(path + "_240.jpg");
}
else {
return fetchAlbumArt(path);
}
}
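    // Illustrative mapping (not in the original source) of how the width argument
    // above selects an album art variant for a given base path:
    //   width <= 120  ->  <path>_120.jpg
    //   width <= 240  ->  <path>_240.jpg
    //   larger        ->  <path>.jpg   (full resolution via fetchAlbumArt(path))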
private Bitmap fetchAlbumArtHelper(String path) throws IOException {
URL url = new URL(getUrl(path));
Log.d(TAG, "GET " + url.toString());
InputStream is = url.openStream();
mCurrentAlbumArt = BitmapFactory.decodeStream(is);
return mCurrentAlbumArt;
}
/**
* Returns the current album art.
* @return album art bitmap if any, null otherwise
*/
public Bitmap getCurrentAlbumArt() {
return mCurrentAlbumArt;
}
/**
* Clears the current album art. Sets it to null.
*/
public void clearCurrentAlbumArt() {
mCurrentAlbumArt = null;
}
public Song[] reorderRequests(Song requests[])
throws RainwaveException {
final String path = "order_requests";
final JsonElement root = post(path,
"order", Rainwave.makeRequestQueueString(requests)
);
try {
final Gson gson = getGson();
checkError(JsonHelper.getChild(root, "order_requests_result"));
return gson.fromJson(JsonHelper.getChild(root, "requests"), Song[].class);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
public void deleteRequest(Song request)
throws RainwaveException {
post("delete_request",
"song_id", String.valueOf(request.getId())
);
}
public void setUserInfo(String userId, String key) {
mUserId = userId;
mKey = key;
}
public void clearUserInfo() {
setUserInfo(null, null);
}
public Song[] cloneRequests() {
return mRequests.clone();
}
public boolean hasStations() {
return mStations != null;
}
public boolean hasLastVote() {
return mLastVoteId >= 0;
}
/**
* Get the election entry ID of the last known vote.
*
* @return election entry ID of last known vote, or -1 if it is unknown
*/
public int getLastVoteId() {
return mLastVoteId;
}
/**
* Get the difference in seconds between server time and client time. That
* is, serverTime - clientTime.
*
* @return the drift
*/
public long getDrift() {
return mDrift;
}
public Station[] cloneStations() {
return mStations.clone();
}
public void setStation(int stationId) {
mStation = stationId;
Rainwave.putLastStation(mContext, stationId);
}
public Station getStation(int stationId) {
for(int i = 0; i < mStations.length; i++) {
if(mStations[i].getId() == stationId) {
return mStations[i];
}
}
return null;
}
public String getUrl() {
return mBaseUrl.toString();
}
public int getStationId() {
return mStation;
}
public Event getCurrentEvent() {
return mCurrentEvent;
}
public Event getNextEvent() {
return mNextEvents[0];
}
public boolean isTunedIn() {
return mUser != null && mUser.getTunedIn();
}
/**
* Returns true if the server sent a request list on the last sync() or info().
     * @return true if a request list is available, false otherwise
*/
public boolean hasRequests() {
return mRequests != null;
}
/**
* Returns true if we have set credentials via setUserInfo().
     * @return true if both a user id and key are set and non-empty, false otherwise
*/
public boolean hasCredentials() {
return mUserId != null && mKey != null && mUserId.length() > 0 && mKey.length() > 0;
}
/**
* Returns true if the server thinks we are authenticated.
*
* @return true if authenticated, false otherwise
*/
public boolean isAuthenticated() {
return mUser != null;
}
/**
* Returns true if we require a sync. This can be because no current
* event is available or the current event is in the past.
*
* @return true if a sync is required, false otherwise
*/
public boolean requiresSync() {
if(getCurrentEvent() == null) {
return true;
}
long endTime = getCurrentEvent().getEnd() - getDrift();
long utc = System.currentTimeMillis() / 1000;
return utc > endTime;
}
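    // Worked sketch with made-up numbers: if the server reported time 1000100
    // while the local clock read 1000040 at the end of that request, mDrift is
    // +60. An event whose end is 1000090 in server time then has endTime
    // 1000030 in local seconds, so a local clock of 1000040 exceeds it and
    // requiresSync() returns true.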
private JsonElement get(String path, String... params)
throws RainwaveException {
return request(Method.GET, path, params);
}
private JsonElement post(String path, String... params)
throws RainwaveException {
return request(Method.POST, path, params);
}
private JsonElement request(final Method method, String path, String... params)
throws RainwaveException {
// Construct arguments
Arguments httpArgs = new Arguments(params);
httpArgs.put(NAME_STATION, String.valueOf(mStation));
if(this.hasCredentials()) {
httpArgs.put(NAME_USERID, mUserId);
httpArgs.put(NAME_KEY, mKey);
}
HttpURLConnection conn = null;
final Resources r = mContext.getResources();
long requestEnd;
try {
switch(method) {
case POST:
conn = HttpHelper.makePost(mBaseUrl, path, httpArgs.encode());
break;
case GET:
conn = HttpHelper.makeGet(mBaseUrl, String.format("%s?%s", path, httpArgs.encode()));
break;
default:
throw new IllegalArgumentException("Unhandled HTTP method!");
}
final int statusCode = conn.getResponseCode();
switch(statusCode) {
case HttpURLConnection.HTTP_FORBIDDEN:
throw new RainwaveException(r.getString(R.string.msg_forbidden), statusCode);
}
// log timestamp to calculate drift
requestEnd = System.currentTimeMillis() / 1000;
}
catch(IOException exc) {
throw new RainwaveException(r.getString(R.string.msg_genericError), exc);
}
final JsonElement root;
try {
JsonParser parser = new JsonParser();
final InputStream is = conn.getInputStream();
final InputStreamReader reader = new InputStreamReader(is);
root = parser.parse(reader);
}
catch(IOException exc) {
throw new RainwaveException(r.getString(R.string.msg_genericError), exc);
}
// most every endpoint returns api_info, so we can try and update
// drift whenever possible
if(JsonHelper.hasMember(root, "api_info")) {
final JsonElement api_info = JsonHelper.getChild(root, "api_info");
if(JsonHelper.hasMember(api_info, "time")) {
mDrift = JsonHelper.getLong(api_info, "time") - requestEnd;
}
}
return root;
}
/**
* Fetches a single object from the API.
*
* @param method either Method.GET or Method.POST
* @param path endpoint name
* @param name member name
* @param checkError if true, check the boolean member named "success" and throw on failure
* @param classOfT class to deserialize the named member into
* @param params additional request parameters as alternating name/value pairs
* @return the deserialized object
* @throws RainwaveException if the request fails or the response cannot be parsed
*/
private <T> T requestObject(
final Method method, final String path, final String name,
final boolean checkError, Class<T> classOfT, final String... params
) throws RainwaveException {
// Convert the json into Java objects.
Gson gson = getGson();
final JsonElement json = request(method, path, params);
try {
final JsonElement element = JsonHelper.getChild(json, name);
if(checkError) {
checkError(element);
}
return gson.fromJson(element, classOfT);
}
catch(JsonParseException e) {
throw wrapException(e, path);
}
}
/**
* Check for errors in a JSON response. Reads the boolean member named
* "success"; if it is false, throws an exception containing the "text"
* member and the numeric "code" member when present.
*
* @param element JSON object to check
* @throws RainwaveException if an error is detected
*/
private void checkError(final JsonElement element) throws RainwaveException {
if(!JsonHelper.getBoolean(element, "success")) {
throw new RainwaveException(
JsonHelper.getString(element, "text", mContext.getString(R.string.msg_genericError)),
RainwaveException.STATUS_UNKNOWN,
JsonHelper.getInt(element, "code", RainwaveException.ERROR_UNKNOWN)
);
}
}
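// Editor's note (illustrative, not from the original source): checkError() expects
// a result member shaped roughly like the following; the concrete values are hypothetical.
//   { "success": false, "text": "You are not tuned in.", "code": 603 }
// A "success": true value lets the call return without throwing.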
private RainwaveException wrapException(final JsonParseException exc, final String path)
throws RainwaveException {
Resources r = mContext.getResources();
String msg = String.format(r.getString(R.string.msgfmt_parseError), path, exc.getMessage());
throw new RainwaveException(msg, exc);
}
private String getUrl(String path) throws MalformedURLException {
if (path == null || path.length() == 0)
return mBaseUrl.toString();
final URL url = new URL(mBaseUrl, path);
return url.toString();
}
/**
* Restore a session from a previously saved one.
*
* @param ctx the new context for the session
*/
public void unpickle(Context ctx) {
mContext = ctx;
mStation = Rainwave.getLastStation(ctx, mStation);
setUserInfo(Rainwave.getUserId(ctx), Rainwave.getKey(ctx));
try {
mBaseUrl = new URL(Rainwave.getUrl(ctx));
} catch (MalformedURLException e) {
mBaseUrl = Rainwave.DEFAULT_URL;
}
}
public void pickle(Context ctx) {
mContext = ctx;
Rainwave.putLastStation(mContext, mStation);
Rainwave.putUserId(mContext, mUserId);
Rainwave.putKey(mContext, mKey);
// TODO: Rainwave.putUrl() ?
}
/** The singleton */
private static Session sInstance;
public static Session getInstance() {
if(sInstance == null) {
sInstance = new Session();
}
return sInstance;
}
private static Gson getGson() {
GsonBuilder builder = new GsonBuilder();
builder.registerTypeAdapter(Album.class, new Album.Deserializer());
builder.registerTypeAdapter(Song.class, new Song.Deserializer());
builder.registerTypeAdapter(Event.class, new Event.Deserializer());
builder.registerTypeAdapter(Artist.class, new Artist.Deserializer());
builder.registerTypeAdapter(User.class, new User.Deserializer());
builder.registerTypeAdapter(Station.class, new Station.Deserializer());
builder.registerTypeAdapter(SongRating.class, new SongRating.Deserializer());
builder.registerTypeAdapter(SongRating.AlbumRating.class, new SongRating.AlbumRating.Deserializer());
return builder.create();
}
private static enum Method {
GET, POST
}
public static final String
NAME_STATION = "sid",
NAME_USERID = "user_id",
NAME_KEY = "key";
}
|
src/cc/rainwave/android/api/Session.java
|
package cc.rainwave.android.api;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.util.Log;
import cc.rainwave.android.R;
import cc.rainwave.android.Rainwave;
import cc.rainwave.android.api.types.Album;
import cc.rainwave.android.api.types.Artist;
import cc.rainwave.android.api.types.Event;
import cc.rainwave.android.api.types.RainwaveException;
import cc.rainwave.android.api.types.Song;
import cc.rainwave.android.api.types.SongRating;
import cc.rainwave.android.api.types.Station;
import cc.rainwave.android.api.types.User;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonParser;
import com.google.gson.stream.JsonReader;
public class Session {
private static final String TAG = "Session";
private Context mContext;
private int mStation = 1;
private String mUserId;
private String mKey;
private URL mBaseUrl;
private Bitmap mCurrentAlbumArt;
private Event mCurrentEvent;
private Event[] mNextEvents;
private Event[] mEventHistory;
private Song[] mRequests;
private User mUser;
private Station[] mStations;
private Artist[] mArtists;
private Album[] mAlbums;
private int mLastVoteId = -1;
/** Estimate of time delta in seconds between server time and local time. */
private long mDrift = 0;
/** Can't instantiate directly */
private Session() { }
public void info() throws RainwaveException {
final String path = "info";
try {
final JsonElement element = get(path);
updateSchedules(element);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
public void sync() throws RainwaveException {
final String path = "sync";
// include a last known event id so the server can rush information to
// us if we're behind
String[] args = null;
if(getCurrentEvent() != null) {
args = new String[] {"known_event_id", String.valueOf(getCurrentEvent().getId())};
}
else {
args = new String[0];
}
try {
final JsonElement element = post(path, args);
updateSchedules(element);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
/** Update schedules if we are still listening to their updates. */
private void updateSchedules(final JsonElement root) {
Event newCurrent = getIfExists(root, "sched_current", Event.class, mCurrentEvent);
// It's possible that someone could have changed the station and this is
// an update for a previous station that we no longer need.
//
// FIXME: This is pretty bad practice -- is there another way?
if(newCurrent.getStationId() != getStationId()) {
return;
}
mCurrentEvent = newCurrent;
mNextEvents = getIfExists(root, "sched_next", Event[].class, mNextEvents);
mEventHistory = getIfExists(root, "sched_history", Event[].class, mEventHistory);
mUser = getIfExists(root, "user", User.class, mUser);
mRequests = getIfExists(root, "requests", Song[].class, mRequests);
// This does some checking to see if the "last_vote" reported by the api actually belongs
// to the current election. If it does, it accepts the ID, otherwise it is set to -1.
if(JsonHelper.hasMember(root, "vote_result")) {
final JsonElement child = JsonHelper.getChild(root, "vote_result");
final int elecId = JsonHelper.getInt(child, "elec_id");
if(elecId == mCurrentEvent.getId()) {
mLastVoteId = JsonHelper.getInt(JsonHelper.getChild(root, "vote_result"), "entry_id");
}
else {
mLastVoteId = -1;
}
}
}
private static <T> T getIfExists(final JsonElement root, final String name, Class<T> classOfT, T defaultValue) {
if(JsonHelper.hasMember(root, name)) {
final Gson gson = getGson();
return gson.fromJson(JsonHelper.getChild(root, name), classOfT);
}
return defaultValue;
}
public SongRating rateSong(int songId, float rating)
throws RainwaveException {
final String path = "rate";
final String returns = "rate_result";
rating = Math.max(1.0f, Math.min(rating, 5.0f));
return requestObject(Method.POST, path, returns, true, SongRating.class,
"song_id", String.valueOf(songId),
"rating", String.valueOf(rating)
);
}
public void vote(int elecId)
throws RainwaveException {
final String path = "vote";
final String returns = "vote_result";
try {
final JsonElement element = post(path,
"entry_id", String.valueOf(elecId)
);
checkError(JsonHelper.getChild(element, returns));
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
// checkError() would have thrown if there was an error
mLastVoteId = elecId;
}
public Station[] fetchStations() throws RainwaveException {
final String path = "stations";
final String returns = "stations";
if(hasStations()) {
return cloneStations();
}
mStations = requestObject(Method.POST, path, returns, false, Station[].class);
return cloneStations();
}
/**
* Fetch a list of all the albums. Returns a cached version if available.
* @return array of Albums
* @throws RainwaveException in case of a problem understanding the response
*/
public Album[] fetchAlbums() throws RainwaveException {
final String path = "all_albums";
final String returns = "all_albums";
mAlbums = requestObject(Method.POST, path, returns, false, Album[].class);
return mAlbums;
}
/**
* Get album data.
*
* @return all albums if cached, or null otherwise
*/
public Album[] getAlbums() {
return mAlbums;
}
/**
* Fetch a list of all artists.
*
* @return a list of all artists
* @throws RainwaveException in case of problem understanding the response
*/
public Artist[] fetchArtists() throws RainwaveException {
final String path = "all_artists";
final String returns = "all_artists";
mArtists = requestObject(Method.POST, path, returns, false, Artist[].class);
return mArtists;
}
/**
* Get artist data.
*
* @return all artist data if cached, or null otherwise
*/
public Artist[] getArtists() {
return mArtists;
}
public Artist fetchDetailedArtist(int artist_id) throws RainwaveException {
final String path = "artist";
final String returns = "artist";
return requestObject(Method.POST, path, returns, false, Artist.class,
"id", String.valueOf(artist_id)
);
}
public Album fetchDetailedAlbum(int album_id) throws RainwaveException {
final String path = "album";
final String returns = "album";
return requestObject(Method.POST, path, returns, false, Album.class,
"id", String.valueOf(album_id)
);
}
public Song[] submitRequest(int song_id) throws RainwaveException {
final String path = "request";
final JsonElement root = post(path, "song_id", String.valueOf(song_id));
try {
final Gson gson = getGson();
checkError(JsonHelper.getChild(root, "request_result"));
return gson.fromJson(JsonHelper.getChild(root, "requests"), Song[].class);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
/**
* Fetch a full resolution album art.
*
* @param path base url to album art
* @return bitmap of album art
* @throws IOException
*/
public Bitmap fetchAlbumArt(String path) throws IOException {
return fetchAlbumArtHelper(path + ".jpg");
}
/**
* Fetch a minimum width album art. The returned bitmap is guaranteed to
* be at least the requested width.
*
* @param path base url to album art
* @param width minimum width required
* @return bitmap of album art
* @throws IOException
*/
public Bitmap fetchAlbumArt(String path, int width) throws IOException {
if(width <= 120) {
return fetchAlbumArtHelper(path + "_120.jpg");
}
else if(width <= 240) {
return fetchAlbumArtHelper(path + "_240.jpg");
}
else {
return fetchAlbumArt(path);
}
}
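// Editor's illustration (hypothetical call, not from the original source):
// fetchAlbumArt(albumArtPath, 200) falls into the 240-pixel bucket above and fetches
// "<albumArtPath>_240.jpg"; any width above 240 falls back to the full-resolution
// "<albumArtPath>.jpg" handled by fetchAlbumArt(String).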
private Bitmap fetchAlbumArtHelper(String path) throws IOException {
URL url = new URL(getUrl(path));
Log.d(TAG, "GET " + url.toString());
InputStream is = url.openStream();
mCurrentAlbumArt = BitmapFactory.decodeStream(is);
return mCurrentAlbumArt;
}
/**
* Returns the current album art.
* @return album art bitmap if any, null otherwise
*/
public Bitmap getCurrentAlbumArt() {
return mCurrentAlbumArt;
}
/**
* Clears the current album art. Sets it to null.
*/
public void clearCurrentAlbumArt() {
mCurrentAlbumArt = null;
}
public Song[] reorderRequests(Song requests[])
throws RainwaveException {
final String path = "order_requests";
final JsonElement root = post(path,
"order", Rainwave.makeRequestQueueString(requests)
);
try {
final Gson gson = getGson();
checkError(JsonHelper.getChild(root, "order_requests_result"));
return gson.fromJson(JsonHelper.getChild(root, "requests"), Song[].class);
}
catch(final JsonParseException exc) {
throw wrapException(exc, path);
}
}
public void deleteRequest(Song request)
throws RainwaveException {
post("delete_request",
"song_id", String.valueOf(request.getId())
);
}
public void setUserInfo(String userId, String key) {
mUserId = userId;
mKey = key;
}
public void clearUserInfo() {
setUserInfo(null, null);
}
public Song[] cloneRequests() {
return mRequests.clone();
}
public boolean hasStations() {
return mStations != null;
}
public boolean hasLastVote() {
return mLastVoteId >= 0;
}
public int getLastVoteId() {
return mLastVoteId;
}
/**
* Get the difference in seconds between server time and client time. That
* is, serverTime - clientTime.
*
* @return the drift
*/
public long getDrift() {
return mDrift;
}
public Station[] cloneStations() {
return mStations.clone();
}
public void setStation(int stationId) {
mStation = stationId;
Rainwave.putLastStation(mContext, stationId);
}
public Station getStation(int stationId) {
for(int i = 0; i < mStations.length; i++) {
if(mStations[i].getId() == stationId) {
return mStations[i];
}
}
return null;
}
public String getUrl() {
return mBaseUrl.toString();
}
public int getStationId() {
return mStation;
}
public Event getCurrentEvent() {
return mCurrentEvent;
}
public Event getNextEvent() {
return mNextEvents[0];
}
public boolean isTunedIn() {
return mUser != null && mUser.getTunedIn();
}
/**
* Returns true if the server sent a request list on the last sync() or info().
* @return true if a request list is available, false otherwise
*/
public boolean hasRequests() {
return mRequests != null;
}
/**
* Returns true if we have set credentials via setUserInfo().
* @return true if both a user id and key have been set, false otherwise
*/
public boolean hasCredentials() {
return mUserId != null && mKey != null && mUserId.length() > 0 && mKey.length() > 0;
}
/**
* Returns true if the server thinks we are authenticated.
*
* @return true if authenticated, false otherwise
*/
public boolean isAuthenticated() {
return mUser != null;
}
/**
* Returns true if we require a sync. This can be because no current
* event is available or the current event is in the past.
*
* @return true if a sync is required, false otherwise
*/
public boolean requiresSync() {
if(getCurrentEvent() == null) {
return true;
}
long endTime = getCurrentEvent().getEnd() - getDrift();
long utc = System.currentTimeMillis() / 1000;
return utc > endTime;
}
private JsonElement get(String path, String... params)
throws RainwaveException {
return request(Method.GET, path, params);
}
private JsonElement post(String path, String... params)
throws RainwaveException {
return request(Method.POST, path, params);
}
private JsonElement request(final Method method, String path, String... params)
throws RainwaveException {
// Construct arguments
Arguments httpArgs = new Arguments(params);
httpArgs.put(NAME_STATION, String.valueOf(mStation));
if(this.hasCredentials()) {
httpArgs.put(NAME_USERID, mUserId);
httpArgs.put(NAME_KEY, mKey);
}
HttpURLConnection conn = null;
final Resources r = mContext.getResources();
long requestEnd;
try {
switch(method) {
case POST:
conn = HttpHelper.makePost(mBaseUrl, path, httpArgs.encode());
break;
case GET:
conn = HttpHelper.makeGet(mBaseUrl, String.format("%s?%s", path, httpArgs.encode()));
break;
default:
throw new IllegalArgumentException("Unhandled HTTP method!");
}
final int statusCode = conn.getResponseCode();
switch(statusCode) {
case HttpURLConnection.HTTP_FORBIDDEN:
throw new RainwaveException(r.getString(R.string.msg_forbidden), statusCode);
}
// log timestamp to calculate drift
requestEnd = System.currentTimeMillis() / 1000;
}
catch(IOException exc) {
throw new RainwaveException(r.getString(R.string.msg_genericError), exc);
}
final JsonElement root;
try {
JsonParser parser = new JsonParser();
final InputStream is = conn.getInputStream();
final InputStreamReader reader = new InputStreamReader(is);
root = parser.parse(reader);
}
catch(IOException exc) {
throw new RainwaveException(r.getString(R.string.msg_genericError), exc);
}
// almost every endpoint returns api_info, so we try to update
// the drift estimate whenever possible
if(JsonHelper.hasMember(root, "api_info")) {
final JsonElement api_info = JsonHelper.getChild(root, "api_info");
if(JsonHelper.hasMember(api_info, "time")) {
mDrift = JsonHelper.getLong(api_info, "time") - requestEnd;
}
}
return root;
}
/**
* Fetches a single object from the API.
*
* @param method either Method.GET or Method.POST
* @param path endpoint name
* @param name member name
* @param checkError if true, check the boolean member named "success" and throw on failure
* @param classOfT class to deserialize the named member into
* @param params additional request parameters as alternating name/value pairs
* @return the deserialized object
* @throws RainwaveException if the request fails or the response cannot be parsed
*/
private <T> T requestObject(
final Method method, final String path, final String name,
final boolean checkError, Class<T> classOfT, final String... params
) throws RainwaveException {
// Convert the json into Java objects.
Gson gson = getGson();
final JsonElement json = request(method, path, params);
try {
final JsonElement element = JsonHelper.getChild(json, name);
if(checkError) {
checkError(element);
}
return gson.fromJson(element, classOfT);
}
catch(JsonParseException e) {
throw wrapException(e, path);
}
}
/**
* Check for errors in a JSON response. Reads the boolean member named
* "success"; if it is false, throws an exception containing the "text"
* member and the numeric "code" member when present.
*
* @param element JSON object to check
* @throws RainwaveException if an error is detected
*/
private void checkError(final JsonElement element) throws RainwaveException {
if(!JsonHelper.getBoolean(element, "success")) {
throw new RainwaveException(
JsonHelper.getString(element, "text", mContext.getString(R.string.msg_genericError)),
RainwaveException.STATUS_UNKNOWN,
JsonHelper.getInt(element, "code", RainwaveException.ERROR_UNKNOWN)
);
}
}
private RainwaveException wrapException(final JsonParseException exc, final String path)
throws RainwaveException {
Resources r = mContext.getResources();
String msg = String.format(r.getString(R.string.msgfmt_parseError), path, exc.getMessage());
throw new RainwaveException(msg, exc);
}
private String getUrl(String path) throws MalformedURLException {
if (path == null || path.length() == 0)
return mBaseUrl.toString();
final URL url = new URL(mBaseUrl, path);
return url.toString();
}
/**
* Restore a session from a previously saved one.
*
* @param ctx the new context for the session
*/
public void unpickle(Context ctx) {
mContext = ctx;
mStation = Rainwave.getLastStation(ctx, mStation);
setUserInfo(Rainwave.getUserId(ctx), Rainwave.getKey(ctx));
try {
mBaseUrl = new URL(Rainwave.getUrl(ctx));
} catch (MalformedURLException e) {
mBaseUrl = Rainwave.DEFAULT_URL;
}
}
public void pickle(Context ctx) {
mContext = ctx;
Rainwave.putLastStation(mContext, mStation);
Rainwave.putUserId(mContext, mUserId);
Rainwave.putKey(mContext, mKey);
// TODO: Rainwave.putUrl() ?
}
/** The singleton */
private static Session sInstance;
public static Session getInstance() {
if(sInstance == null) {
sInstance = new Session();
}
return sInstance;
}
private static Gson getGson() {
GsonBuilder builder = new GsonBuilder();
builder.registerTypeAdapter(Album.class, new Album.Deserializer());
builder.registerTypeAdapter(Song.class, new Song.Deserializer());
builder.registerTypeAdapter(Event.class, new Event.Deserializer());
builder.registerTypeAdapter(Artist.class, new Artist.Deserializer());
builder.registerTypeAdapter(User.class, new User.Deserializer());
builder.registerTypeAdapter(Station.class, new Station.Deserializer());
builder.registerTypeAdapter(SongRating.class, new SongRating.Deserializer());
builder.registerTypeAdapter(SongRating.AlbumRating.class, new SongRating.AlbumRating.Deserializer());
return builder.create();
}
private static enum Method {
GET, POST
}
public static final String
NAME_STATION = "sid",
NAME_USERID = "user_id",
NAME_KEY = "key";
}
|
docstring for Session.getLastVoteId()
|
src/cc/rainwave/android/api/Session.java
|
docstring for Session.getLastVoteId()
|
|
Java
|
bsd-3-clause
|
96b95b9a1e1593bb81a3e63da92799e2152a7ef9
| 0
|
zzuegg/jmonkeyengine,atomixnmc/jmonkeyengine,atomixnmc/jmonkeyengine,jMonkeyEngine/jmonkeyengine,jMonkeyEngine/jmonkeyengine,zzuegg/jmonkeyengine,atomixnmc/jmonkeyengine,jMonkeyEngine/jmonkeyengine,zzuegg/jmonkeyengine,zzuegg/jmonkeyengine,atomixnmc/jmonkeyengine,atomixnmc/jmonkeyengine,atomixnmc/jmonkeyengine,jMonkeyEngine/jmonkeyengine
|
package com.jme3.scene.plugins.gltf;
import com.google.gson.*;
import com.google.gson.stream.JsonReader;
import com.jme3.animation.*;
import com.jme3.asset.*;
import com.jme3.material.Material;
import com.jme3.material.RenderState;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.scene.*;
import com.jme3.scene.control.CameraControl;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.util.IntMap;
import com.jme3.util.mikktspace.MikktspaceTangentGenerator;
import javax.xml.bind.DatatypeConverter;
import java.io.*;
import java.nio.Buffer;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.jme3.scene.plugins.gltf.GltfUtils.*;
/**
* GLTF 2.0 loader
* Created by Nehon on 07/08/2017.
*/
public class GltfLoader implements AssetLoader {
private static final Logger logger = Logger.getLogger(GltfLoader.class.getName());
//Data cache for already parsed JME objects
private Map<String, Object[]> dataCache = new HashMap<>();
private JsonArray scenes;
private JsonArray nodes;
private JsonArray meshes;
private JsonArray accessors;
private JsonArray bufferViews;
private JsonArray buffers;
private JsonArray materials;
private JsonArray textures;
private JsonArray images;
private JsonArray samplers;
private JsonArray animations;
private JsonArray skins;
private JsonArray cameras;
private Material defaultMat;
private AssetInfo info;
private JsonObject docRoot;
private Node rootNode;
private FloatArrayPopulator floatArrayPopulator = new FloatArrayPopulator();
private Vector3fArrayPopulator vector3fArrayPopulator = new Vector3fArrayPopulator();
private QuaternionArrayPopulator quaternionArrayPopulator = new QuaternionArrayPopulator();
private Matrix4fArrayPopulator matrix4fArrayPopulator = new Matrix4fArrayPopulator();
private static Map<String, MaterialAdapter> defaultMaterialAdapters = new HashMap<>();
private CustomContentManager customContentManager = new CustomContentManager();
private boolean useNormalsFlag = false;
private Quaternion tmpQuat = new Quaternion();
private Transform tmpTransforms = new Transform();
private Transform tmpTransforms2 = new Transform();
private Matrix4f tmpMat = new Matrix4f();
Map<SkinData, List<Spatial>> skinnedSpatials = new HashMap<>();
IntMap<SkinBuffers> skinBuffers = new IntMap<>();
static {
defaultMaterialAdapters.put("pbrMetallicRoughness", new PBRMetalRoughMaterialAdapter());
}
@Override
public Object load(AssetInfo assetInfo) throws IOException {
return loadFromStream(assetInfo, assetInfo.openStream());
}
protected Object loadFromStream(AssetInfo assetInfo, InputStream stream) throws IOException {
try {
dataCache.clear();
info = assetInfo;
skinnedSpatials.clear();
rootNode = new Node();
if (defaultMat == null) {
defaultMat = new Material(assetInfo.getManager(), "Common/MatDefs/Light/PBRLighting.j3md");
defaultMat.setColor("BaseColor", ColorRGBA.White);
defaultMat.setFloat("Metallic", 0f);
defaultMat.setFloat("Roughness", 1f);
}
docRoot = new JsonParser().parse(new JsonReader(new InputStreamReader(stream))).getAsJsonObject();
JsonObject asset = docRoot.getAsJsonObject().get("asset").getAsJsonObject();
String generator = getAsString(asset, "generator");
String version = getAsString(asset, "version");
String minVersion = getAsString(asset, "minVersion");
if (!isSupported(version, minVersion)) {
logger.log(Level.SEVERE, "Gltf Loader doesn't support this gltf version: " + version + (minVersion != null ? ("/" + minVersion) : ""));
}
scenes = docRoot.getAsJsonArray("scenes");
nodes = docRoot.getAsJsonArray("nodes");
meshes = docRoot.getAsJsonArray("meshes");
accessors = docRoot.getAsJsonArray("accessors");
bufferViews = docRoot.getAsJsonArray("bufferViews");
buffers = docRoot.getAsJsonArray("buffers");
materials = docRoot.getAsJsonArray("materials");
textures = docRoot.getAsJsonArray("textures");
images = docRoot.getAsJsonArray("images");
samplers = docRoot.getAsJsonArray("samplers");
animations = docRoot.getAsJsonArray("animations");
skins = docRoot.getAsJsonArray("skins");
cameras = docRoot.getAsJsonArray("cameras");
customContentManager.init(this);
readSkins();
readCameras();
JsonPrimitive defaultScene = docRoot.getAsJsonPrimitive("scene");
readScenes(defaultScene, rootNode);
rootNode = customContentManager.readExtensionAndExtras("root", docRoot, rootNode);
setupControls();
//Loading animations
if (animations != null) {
for (int i = 0; i < animations.size(); i++) {
readAnimation(i);
}
}
//only one scene, so return it directly instead of the wrapping root node.
if (rootNode.getChildren().size() == 1) {
rootNode = (Node) rootNode.getChild(0);
}
//no name for the scene... let's set the file name.
if (rootNode.getName() == null) {
rootNode.setName(assetInfo.getKey().getName());
}
return rootNode;
} catch (Exception e) {
throw new AssetLoadException("An error occurred loading " + assetInfo.getKey().getName(), e);
} finally {
stream.close();
}
}
private void setDefaultParams(Material mat) {
mat.setColor("BaseColor", ColorRGBA.White);
mat.setFloat("Metallic", 0f);
mat.setFloat("Roughness", 1f);
}
private boolean isSupported(String version, String minVersion) {
return "2.0".equals(version);
}
public void readScenes(JsonPrimitive defaultScene, Node rootNode) throws IOException {
if (scenes == null) {
//no scene... let's handle this later...
throw new AssetLoadException("Gltf files with no scene are not yet supported");
}
for (JsonElement scene : scenes) {
Node sceneNode = new Node();
//the spec says that only the default scene should be rendered;
// if there are several scenes, they are all attached to the root node but culled
sceneNode.setCullHint(Spatial.CullHint.Always);
sceneNode.setName(getAsString(scene.getAsJsonObject(), "name"));
JsonArray sceneNodes = scene.getAsJsonObject().getAsJsonArray("nodes");
sceneNode = customContentManager.readExtensionAndExtras("scene", scene, sceneNode);
rootNode.attachChild(sceneNode);
for (JsonElement node : sceneNodes) {
readChild(sceneNode, node);
}
}
//Setting the default scene cull hint to inherit.
int activeChild = 0;
if (defaultScene != null) {
activeChild = defaultScene.getAsInt();
}
rootNode.getChild(activeChild).setCullHint(Spatial.CullHint.Inherit);
}
public Object readNode(int nodeIndex) throws IOException {
Object obj = fetchFromCache("nodes", nodeIndex, Object.class);
if (obj != null) {
if (obj instanceof BoneWrapper) {
//the node can be a previously loaded bone let's return it
return obj;
} else {
//If a spatial is referenced several times, it may be attached to different parents,
// which is not possible in JME, so we have to clone it.
return ((Spatial) obj).clone();
}
}
Spatial spatial;
JsonObject nodeData = nodes.get(nodeIndex).getAsJsonObject();
JsonArray children = nodeData.getAsJsonArray("children");
Integer meshIndex = getAsInteger(nodeData, "mesh");
if (meshIndex != null) {
assertNotNull(meshes, "Can't find any mesh data, yet a node references a mesh");
//there is a mesh in this node; gltf can split a mesh into primitives (a kind of sub-mesh),
//which JME doesn't have, so we make one Mesh and one Geometry for each primitive.
Geometry[] primitives = readMeshPrimitives(meshIndex);
if (primitives.length == 1 && children == null) {
//only one geometry, let's not wrap it in another node unless the node has children.
spatial = primitives[0];
} else {
//several geometries, let's make a parent Node and attach them to it
Node node = new Node();
for (Geometry primitive : primitives) {
node.attachChild(primitive);
}
spatial = node;
}
spatial.setName(readMeshName(meshIndex));
} else {
//no mesh, we have a node. Can be a camera node or a regular node.
Integer camIndex = getAsInteger(nodeData, "camera");
if (camIndex != null) {
Camera cam = fetchFromCache("cameras", camIndex, Camera.class);
CameraNode node = new CameraNode(null, cam);
node.setControlDir(CameraControl.ControlDirection.SpatialToCamera);
spatial = node;
} else {
Node node = new Node();
spatial = node;
}
}
Integer skinIndex = getAsInteger(nodeData, "skin");
if (skinIndex != null) {
SkinData skinData = fetchFromCache("skins", skinIndex, SkinData.class);
List<Spatial> spatials = skinnedSpatials.get(skinData);
spatials.add(spatial);
skinData.used = true;
}
spatial.setLocalTransform(readTransforms(nodeData));
if (spatial.getName() == null) {
spatial.setName(getAsString(nodeData.getAsJsonObject(), "name"));
}
spatial = customContentManager.readExtensionAndExtras("node", nodeData, spatial);
addToCache("nodes", nodeIndex, spatial, nodes.size());
return spatial;
}
private void readChild(Spatial parent, JsonElement nodeIndex) throws IOException {
Object loaded = readNode(nodeIndex.getAsInt());
if (loaded instanceof Spatial) {
Spatial spatial = ((Spatial) loaded);
((Node) parent).attachChild(spatial);
JsonObject nodeElem = nodes.get(nodeIndex.getAsInt()).getAsJsonObject();
JsonArray children = nodeElem.getAsJsonArray("children");
if (children != null) {
for (JsonElement child : children) {
readChild(spatial, child);
}
}
} else if (loaded instanceof BoneWrapper) {
//parent is the Armature Node, we have to apply its transforms to the root bone's animation data
BoneWrapper bw = (BoneWrapper) loaded;
bw.isRoot = true;
SkinData skinData = fetchFromCache("skins", bw.skinIndex, SkinData.class);
if (skinData == null) {
return;
}
skinData.parent = parent;
}
}
public Transform readTransforms(JsonObject nodeData) {
Transform transform = new Transform();
JsonArray matrix = nodeData.getAsJsonArray("matrix");
if (matrix != null) {
//transforms are given as a mat4
float[] tmpArray = new float[16];
for (int i = 0; i < tmpArray.length; i++) {
tmpArray[i] = matrix.get(i).getAsFloat();
}
//creates a row major matrix from column major data
Matrix4f mat = new Matrix4f(tmpArray);
transform.fromTransformMatrix(mat);
return transform;
}
//no matrix transform: either no transforms at all, or transforms given as translation/rotation/scale
JsonArray translation = nodeData.getAsJsonArray("translation");
if (translation != null) {
transform.setTranslation(
translation.get(0).getAsFloat(),
translation.get(1).getAsFloat(),
translation.get(2).getAsFloat());
}
JsonArray rotation = nodeData.getAsJsonArray("rotation");
if (rotation != null) {
transform.setRotation(new Quaternion(
rotation.get(0).getAsFloat(),
rotation.get(1).getAsFloat(),
rotation.get(2).getAsFloat(),
rotation.get(3).getAsFloat()));
}
JsonArray scale = nodeData.getAsJsonArray("scale");
if (scale != null) {
transform.setScale(
scale.get(0).getAsFloat(),
scale.get(1).getAsFloat(),
scale.get(2).getAsFloat());
}
return transform;
}
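// Editor's illustration (field names per the glTF 2.0 spec, values hypothetical):
// a node carries either a 16-float column-major "matrix", or separate
// "translation" [x, y, z], "rotation" [x, y, z, w] and "scale" [x, y, z] arrays, e.g.
//   { "translation": [0, 1, 0], "rotation": [0, 0, 0, 1], "scale": [2, 2, 2] }
// which readTransforms() maps to a Transform moved one unit up the Y axis with
// identity rotation and a uniform scale of 2.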
public Geometry[] readMeshPrimitives(int meshIndex) throws IOException {
Geometry[] geomArray = (Geometry[]) fetchFromCache("meshes", meshIndex, Object.class);
if (geomArray != null) {
//cloning the geoms.
Geometry[] geoms = new Geometry[geomArray.length];
for (int i = 0; i < geoms.length; i++) {
geoms[i] = geomArray[i].clone(false);
}
return geoms;
}
JsonObject meshData = meshes.get(meshIndex).getAsJsonObject();
JsonArray primitives = meshData.getAsJsonArray("primitives");
assertNotNull(primitives, "Can't find any primitives in mesh " + meshIndex);
String name = getAsString(meshData, "name");
geomArray = new Geometry[primitives.size()];
int index = 0;
for (JsonElement primitive : primitives) {
JsonObject meshObject = primitive.getAsJsonObject();
Mesh mesh = new Mesh();
Integer mode = getAsInteger(meshObject, "mode");
mesh.setMode(getMeshMode(mode));
Integer indices = getAsInteger(meshObject, "indices");
if (indices != null) {
mesh.setBuffer(readAccessorData(indices, new VertexBufferPopulator(VertexBuffer.Type.Index)));
}
JsonObject attributes = meshObject.getAsJsonObject("attributes");
assertNotNull(attributes, "No attributes defined for mesh " + mesh);
skinBuffers.clear();
for (Map.Entry<String, JsonElement> entry : attributes.entrySet()) {
//special case for the joints and weights buffers: with more than 4 bones per vertex there may be several such buffers,
//so we read them all and keep only the 4 influences with the most weight on each vertex.
String bufferType = entry.getKey();
if (bufferType.startsWith("JOINTS")) {
SkinBuffers buffs = getSkinBuffers(bufferType);
SkinBuffers buffer = readAccessorData(entry.getValue().getAsInt(), new JointArrayPopulator());
buffs.joints = buffer.joints;
buffs.componentSize = buffer.componentSize;
} else if (bufferType.startsWith("WEIGHTS")) {
SkinBuffers buffs = getSkinBuffers(bufferType);
buffs.weights = readAccessorData(entry.getValue().getAsInt(), new FloatArrayPopulator());
} else {
VertexBuffer vb = readAccessorData(entry.getValue().getAsInt(), new VertexBufferPopulator(getVertexBufferType(bufferType)));
if (vb != null) {
mesh.setBuffer(vb);
}
}
}
handleSkinningBuffers(mesh, skinBuffers);
if (mesh.getBuffer(VertexBuffer.Type.BoneIndex) != null) {
//the mesh has skinning data, so create the buffers needed for hardware skinning
//they are created empty here and will only be populated if HW skinning is actually used
VertexBuffer weightsHW = new VertexBuffer(VertexBuffer.Type.HWBoneWeight);
VertexBuffer indicesHW = new VertexBuffer(VertexBuffer.Type.HWBoneIndex);
//setting usage to CpuOnly so that the empty buffer is not sent to the GPU
indicesHW.setUsage(VertexBuffer.Usage.CpuOnly);
weightsHW.setUsage(VertexBuffer.Usage.CpuOnly);
mesh.setBuffer(weightsHW);
mesh.setBuffer(indicesHW);
mesh.generateBindPose();
}
mesh = customContentManager.readExtensionAndExtras("primitive", meshObject, mesh);
Geometry geom = new Geometry(null, mesh);
Integer materialIndex = getAsInteger(meshObject, "material");
if (materialIndex == null) {
geom.setMaterial(defaultMat);
} else {
useNormalsFlag = false;
geom.setMaterial(readMaterial(materialIndex));
if (geom.getMaterial().getAdditionalRenderState().getBlendMode() == RenderState.BlendMode.Alpha) {
//Alpha blending is enabled on this material, so place the geometry in the transparent bucket
geom.setQueueBucket(RenderQueue.Bucket.Transparent);
}
if (useNormalsFlag && mesh.getBuffer(VertexBuffer.Type.Tangent) == null) {
//No tangent buffer, but there is a normal map: generate tangents using MikktSpace
MikktspaceTangentGenerator.generate(geom);
}
}
if (name != null) {
geom.setName(name + (primitives.size() > 1 ? ("_" + index) : ""));
}
geom.updateModelBound();
geomArray[index] = geom;
index++;
//TODO targets(morph anim...)
}
geomArray = customContentManager.readExtensionAndExtras("mesh", meshData, geomArray);
addToCache("meshes", meshIndex, geomArray, meshes.size());
return geomArray;
}
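// Editor's note (attribute naming per the glTF 2.0 spec, not defined in this file):
// skinning attributes arrive as numbered sets such as "JOINTS_0"/"WEIGHTS_0",
// "JOINTS_1"/"WEIGHTS_1", ... each holding four influences per vertex, which is why
// readMeshPrimitives() merges all sets and keeps only the four strongest weights
// per vertex for JME's bone buffers.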
private SkinBuffers getSkinBuffers(String bufferType) {
int bufIndex = getIndex(bufferType);
SkinBuffers buffs = skinBuffers.get(bufIndex);
if (buffs == null) {
buffs = new SkinBuffers();
skinBuffers.put(bufIndex, buffs);
}
return buffs;
}
public <R> R readAccessorData(int accessorIndex, Populator<R> populator) throws IOException {
assertNotNull(accessors, "No accessor attribute in the gltf file");
JsonObject accessor = accessors.get(accessorIndex).getAsJsonObject();
Integer bufferViewIndex = getAsInteger(accessor, "bufferView");
int byteOffset = getAsInteger(accessor, "byteOffset", 0);
Integer componentType = getAsInteger(accessor, "componentType");
assertNotNull(componentType, "No component type defined for accessor " + accessorIndex);
Integer count = getAsInteger(accessor, "count");
assertNotNull(count, "No count attribute defined for accessor " + accessorIndex);
String type = getAsString(accessor, "type");
assertNotNull(type, "No type attribute defined for accessor " + accessorIndex);
boolean normalized = getAsBoolean(accessor, "normalized", false);
//TODO min / max...don't know what to do about them.
//TODO sparse
R data = populator.populate(bufferViewIndex, componentType, type, count, byteOffset, normalized);
data = customContentManager.readExtensionAndExtras("accessor", accessor, data);
return data;
}
public Object readBuffer(Integer bufferViewIndex, int byteOffset, int count, Object store, int numComponents, VertexBuffer.Format format) throws IOException {
JsonObject bufferView = bufferViews.get(bufferViewIndex).getAsJsonObject();
Integer bufferIndex = getAsInteger(bufferView, "buffer");
assertNotNull(bufferIndex, "No buffer defined for bufferView " + bufferViewIndex);
int bvByteOffset = getAsInteger(bufferView, "byteOffset", 0);
Integer byteLength = getAsInteger(bufferView, "byteLength");
assertNotNull(byteLength, "No byte length defined for bufferView " + bufferViewIndex);
int byteStride = getAsInteger(bufferView, "byteStride", 0);
//target defines ELEMENT_ARRAY_BUFFER or ARRAY_BUFFER, but we already know which one we are loading (index buffer or other vertex data);
//it is mostly useful when mapping data directly to the GPU, so it is ignored here.
//int target = getAsInteger(bufferView, "target", 0);
byte[] data = readData(bufferIndex);
data = customContentManager.readExtensionAndExtras("bufferView", bufferView, data);
if (store == null) {
store = new byte[byteLength];
}
if (count == -1) {
count = byteLength;
}
populateBuffer(store, data, count, byteOffset + bvByteOffset, byteStride, numComponents, format);
return store;
}
public byte[] readData(int bufferIndex) throws IOException {
assertNotNull(buffers, "No buffer defined");
JsonObject buffer = buffers.get(bufferIndex).getAsJsonObject();
String uri = getAsString(buffer, "uri");
Integer bufferLength = getAsInteger(buffer, "byteLength");
assertNotNull(bufferLength, "No byteLength defined for buffer " + bufferIndex);
byte[] data = (byte[]) fetchFromCache("buffers", bufferIndex, Object.class);
if (data != null) {
return data;
}
data = getBytes(bufferIndex, uri, bufferLength);
data = customContentManager.readExtensionAndExtras("buffer", buffer, data);
addToCache("buffers", bufferIndex, data, buffers.size());
return data;
}
protected byte[] getBytes(int bufferIndex, String uri, Integer bufferLength) throws IOException {
byte[] data;
if (uri != null) {
if (uri.startsWith("data:")) {
//base 64 embed data
data = DatatypeConverter.parseBase64Binary(uri.substring(uri.indexOf(",") + 1));
} else {
//external file let's load it
if (!uri.endsWith(".bin")) {
throw new AssetLoadException("Cannot load " + uri + ", a .bin extension is required.");
}
BinDataKey key = new BinDataKey(info.getKey().getFolder() + uri);
InputStream input = (InputStream) info.getManager().loadAsset(key);
data = new byte[bufferLength];
DataInputStream dataStream = new DataInputStream(input);
dataStream.readFully(data);
dataStream.close();
}
} else {
//no URI: this should not happen in a .gltf file, only in .glb files.
throw new AssetLoadException("Buffer " + bufferIndex + " has no uri");
}
return data;
}
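// Editor's illustration (example values, not from the original source) of the two
// uri forms handled by getBytes(): embedded data uses a base64 data URI such as
//   "data:application/octet-stream;base64,AAABAAIA..."
// while an external buffer is a relative path ending in ".bin", e.g. "scene.bin",
// resolved against the folder of the .gltf asset key.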
public Material readMaterial(int materialIndex) throws IOException {
assertNotNull(materials, "There is no material defined yet a mesh references one");
JsonObject matData = materials.get(materialIndex).getAsJsonObject();
JsonObject pbrMat = matData.getAsJsonObject("pbrMetallicRoughness");
MaterialAdapter adapter = null;
if (pbrMat != null) {
adapter = getAdapterForMaterial(info, "pbrMetallicRoughness");
if (adapter == null) {
adapter = defaultMaterialAdapters.get("pbrMetallicRoughness");
}
adapter.init(info.getManager());
}
adapter = customContentManager.readExtensionAndExtras("material", matData, adapter);
if (adapter == null) {
logger.log(Level.WARNING, "Couldn't find any matching material definition for material " + materialIndex);
adapter = defaultMaterialAdapters.get("pbrMetallicRoughness");
adapter.init(info.getManager());
setDefaultParams(adapter.getMaterial());
}
if (pbrMat != null) {
adapter.setParam("baseColorFactor", getAsColor(pbrMat, "baseColorFactor", ColorRGBA.White));
adapter.setParam("metallicFactor", getAsFloat(pbrMat, "metallicFactor", 1f));
adapter.setParam("roughnessFactor", getAsFloat(pbrMat, "roughnessFactor", 1f));
adapter.setParam("baseColorTexture", readTexture(pbrMat.getAsJsonObject("baseColorTexture")));
adapter.setParam("metallicRoughnessTexture", readTexture(pbrMat.getAsJsonObject("metallicRoughnessTexture")));
}
adapter.getMaterial().setName(getAsString(matData, "name"));
adapter.setParam("emissiveFactor", getAsColor(matData, "emissiveFactor", ColorRGBA.Black));
String alphaMode = getAsString(matData, "alphaMode");
adapter.setParam("alphaMode", alphaMode);
if (alphaMode != null && alphaMode.equals("MASK")) {
adapter.setParam("alphaCutoff", getAsFloat(matData, "alphaCutoff"));
}
adapter.setParam("doubleSided", getAsBoolean(matData, "doubleSided"));
Texture2D normal = readTexture(matData.getAsJsonObject("normalTexture"));
adapter.setParam("normalTexture", normal);
if (normal != null) {
useNormalsFlag = true;
}
adapter.setParam("occlusionTexture", readTexture(matData.getAsJsonObject("occlusionTexture")));
adapter.setParam("emissiveTexture", readTexture(matData.getAsJsonObject("emissiveTexture")));
return adapter.getMaterial();
}
public void readCameras() throws IOException {
if (cameras == null) {
return;
}
for (int i = 0; i < cameras.size(); i++) {
//Can't access the display resolution here... it's a shame we can't access settings from anywhere.
//Users will have to call resize on the camera.
Camera cam = new Camera(1, 1);
JsonObject camObj = cameras.get(i).getAsJsonObject();
String type = getAsString(camObj, "type");
assertNotNull(type, "No type defined for camera");
JsonObject camData = camObj.getAsJsonObject(type);
if (type.equals("perspective")) {
float aspectRatio = getAsFloat(camData, "aspectRatio", 1f);
Float yfov = getAsFloat(camData, "yfov");
assertNotNull(yfov, "No yfov for perspective camera");
Float znear = getAsFloat(camData, "znear");
assertNotNull(znear, "No znear for perspective camere");
Float zfar = getAsFloat(camData, "zfar", znear * 1000f);
cam.setFrustumPerspective(yfov * FastMath.RAD_TO_DEG, aspectRatio, znear, zfar);
cam = customContentManager.readExtensionAndExtras("camera.perspective", camData, cam);
} else {
Float xmag = getAsFloat(camData, "xmag");
assertNotNull(xmag, "No xmag for orthographic camera");
Float ymag = getAsFloat(camData, "ymag");
assertNotNull(ymag, "No ymag for orthographic camera");
Float znear = getAsFloat(camData, "znear");
assertNotNull(znear, "No znear for orthographic camere");
Float zfar = getAsFloat(camData, "zfar", znear * 1000f);
assertNotNull(zfar, "No zfar for orthographic camera");
cam.setParallelProjection(true);
cam.setFrustum(znear, zfar, -xmag, xmag, ymag, -ymag);
cam = customContentManager.readExtensionAndExtras("camera.orthographic", camData, cam);
}
cam = customContentManager.readExtensionAndExtras("camera", camObj, cam);
addToCache("cameras", i, cam, cameras.size());
}
}
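// Editor's worked example (hypothetical values): glTF stores "yfov" in radians, so a
// perspective camera with yfov = 0.7854 ends up at roughly 45 degrees after the
// FastMath.RAD_TO_DEG conversion above; with znear = 0.1 and no "zfar" given, the far
// plane defaults to znear * 1000 = 100.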
public Texture2D readTexture(JsonObject texture) throws IOException {
return readTexture(texture, false);
}
public Texture2D readTexture(JsonObject texture, boolean flip) throws IOException {
if (texture == null) {
return null;
}
Integer textureIndex = getAsInteger(texture, "index");
assertNotNull(textureIndex, "Texture has no index");
assertNotNull(textures, "There are no textures, yet one is referenced by a material");
JsonObject textureData = textures.get(textureIndex).getAsJsonObject();
Integer sourceIndex = getAsInteger(textureData, "source");
Integer samplerIndex = getAsInteger(textureData, "sampler");
Texture2D texture2d = readImage(sourceIndex, flip);
if (samplerIndex != null) {
texture2d = readSampler(samplerIndex, texture2d);
} else {
texture2d.setWrap(Texture.WrapMode.Repeat);
}
texture2d = customContentManager.readExtensionAndExtras("texture", texture, texture2d);
return texture2d;
}
public Texture2D readImage(int sourceIndex, boolean flip) throws IOException {
if (images == null) {
throw new AssetLoadException("No image defined");
}
JsonObject image = images.get(sourceIndex).getAsJsonObject();
String uri = getAsString(image, "uri");
Integer bufferView = getAsInteger(image, "bufferView");
String mimeType = getAsString(image, "mimeType");
Texture2D result;
if (uri == null) {
assertNotNull(bufferView, "Image " + sourceIndex + " should either have a uri or a bufferView");
assertNotNull(mimeType, "Image " + sourceIndex + " should have a mimeType");
byte[] data = (byte[]) readBuffer(bufferView, 0, -1, null, 1, VertexBuffer.Format.Byte);
String extension = mimeType.split("/")[1];
TextureKey key = new TextureKey("image" + sourceIndex + "." + extension, flip);
result = (Texture2D) info.getManager().loadAssetFromStream(key, new ByteArrayInputStream(data));
} else if (uri.startsWith("data:")) {
//base64 encoded image
String[] uriInfo = uri.split(",");
byte[] data = DatatypeConverter.parseBase64Binary(uriInfo[1]);
String headerInfo = uriInfo[0].split(";")[0];
String extension = headerInfo.split("/")[1];
TextureKey key = new TextureKey("image" + sourceIndex + "." + extension, flip);
result = (Texture2D) info.getManager().loadAssetFromStream(key, new ByteArrayInputStream(data));
} else {
//external file image
TextureKey key = new TextureKey(info.getKey().getFolder() + uri, flip);
Texture tex = info.getManager().loadTexture(key);
result = (Texture2D) tex;
}
return result;
}
public void readAnimation(int animationIndex) throws IOException {
JsonObject animation = animations.get(animationIndex).getAsJsonObject();
JsonArray channels = animation.getAsJsonArray("channels");
JsonArray samplers = animation.getAsJsonArray("samplers");
String name = getAsString(animation, "name");
assertNotNull(channels, "No channels for animation " + name);
assertNotNull(samplers, "No samplers for animation " + name);
//temp data storage of track data
TrackData[] tracks = new TrackData[nodes.size()];
for (JsonElement channel : channels) {
JsonObject target = channel.getAsJsonObject().getAsJsonObject("target");
Integer targetNode = getAsInteger(target, "node");
String targetPath = getAsString(target, "path");
if (targetNode == null) {
//no target node for the channel, specs say to ignore the channel.
continue;
}
assertNotNull(targetPath, "No target path for channel");
if (targetPath.equals("weight")) {
//Morph animation, not implemented in JME, let's warn the user and skip the channel
logger.log(Level.WARNING, "Morph animation is not supported by JME yet, skipping animation");
continue;
}
TrackData trackData = tracks[targetNode];
if (trackData == null) {
trackData = new TrackData();
tracks[targetNode] = trackData;
}
Integer samplerIndex = getAsInteger(channel.getAsJsonObject(), "sampler");
assertNotNull(samplerIndex, "No animation sampler provided for channel");
JsonObject sampler = samplers.get(samplerIndex).getAsJsonObject();
Integer timeIndex = getAsInteger(sampler, "input");
assertNotNull(timeIndex, "No input accessor provided for animation sampler");
Integer dataIndex = getAsInteger(sampler, "output");
assertNotNull(dataIndex, "No output accessor provided for animation sampler");
String interpolation = getAsString(sampler, "interpolation");
if (interpolation == null || !interpolation.equals("LINEAR")) {
//JME anim system only supports Linear interpolation (will be possible with monkanim though)
//TODO rework this once monkanim is core, or allow a hook for animation loading to fit custom animation systems
logger.log(Level.WARNING, "JME only supports linear interpolation for animations");
}
trackData = customContentManager.readExtensionAndExtras("animation.sampler", sampler, trackData);
float[] times = fetchFromCache("accessors", timeIndex, float[].class);
if (times == null) {
times = readAccessorData(timeIndex, floatArrayPopulator);
addToCache("accessors", timeIndex, times, accessors.size());
}
if (targetPath.equals("translation")) {
trackData.timeArrays.add(new TrackData.TimeData(times, TrackData.Type.Translation));
Vector3f[] translations = readAccessorData(dataIndex, vector3fArrayPopulator);
trackData.translations = translations;
} else if (targetPath.equals("scale")) {
trackData.timeArrays.add(new TrackData.TimeData(times, TrackData.Type.Scale));
Vector3f[] scales = readAccessorData(dataIndex, vector3fArrayPopulator);
trackData.scales = scales;
} else if (targetPath.equals("rotation")) {
trackData.timeArrays.add(new TrackData.TimeData(times, TrackData.Type.Rotation));
Quaternion[] rotations = readAccessorData(dataIndex, quaternionArrayPopulator);
trackData.rotations = rotations;
} else {
//TODO support weights
logger.log(Level.WARNING, "Morph animation is not supported");
continue;
}
tracks[targetNode] = customContentManager.readExtensionAndExtras("channel", channel, trackData);
}
if (name == null) {
name = "anim_" + animationIndex;
}
List<Spatial> spatials = new ArrayList<>();
Animation anim = new Animation();
anim.setName(name);
int skinIndex = -1;
List<Bone> usedBones = new ArrayList<>();
for (int i = 0; i < tracks.length; i++) {
TrackData trackData = tracks[i];
if (trackData == null || trackData.timeArrays.isEmpty()) {
continue;
}
trackData.update();
if (trackData.length > anim.getLength()) {
anim.setLength(trackData.length);
}
Object node = fetchFromCache("nodes", i, Object.class);
if (node instanceof Spatial) {
Spatial s = (Spatial) node;
spatials.add(s);
SpatialTrack track = new SpatialTrack(trackData.times, trackData.translations, trackData.rotations, trackData.scales);
track.setTrackSpatial(s);
anim.addTrack(track);
} else if (node instanceof BoneWrapper) {
BoneWrapper b = (BoneWrapper) node;
//apply the inverseBindMatrix to animation data.
b.update(trackData);
usedBones.add(b.bone);
if (skinIndex == -1) {
skinIndex = b.skinIndex;
} else {
//Check that all bones affected by this animation are from the same skin; otherwise the track is skipped.
if (skinIndex != b.skinIndex) {
logger.log(Level.WARNING, "Animation " + animationIndex + " (" + name + ") applies to bones that are not from the same skin: skin " + skinIndex + ", bone " + b.bone.getName() + " from skin " + b.skinIndex);
continue;
}
}
BoneTrack track = new BoneTrack(b.boneIndex, trackData.times, trackData.translations, trackData.rotations, trackData.scales);
anim.addTrack(track);
}
}
// Check each bone to see if its local pose differs from its bind pose.
// If it does, ensure the bone has an animation track, else JME's way of applying animation transforms
// will apply the bind pose to those bones instead of the local pose that is supposed to be the default.
if (skinIndex != -1) {
SkinData skin = fetchFromCache("skins", skinIndex, SkinData.class);
Skeleton skeleton = skin.skeletonControl.getSkeleton();
for (Bone bone : skin.bones) {
if (!usedBones.contains(bone) && !equalBindAndLocalTransforms(bone)) {
//create a track
float[] times = new float[]{0, anim.getLength()};
Vector3f t = bone.getLocalPosition().subtract(bone.getBindPosition());
Quaternion r = tmpQuat.set(bone.getBindRotation()).inverse().multLocal(bone.getLocalRotation());
Vector3f s = bone.getLocalScale().divide(bone.getBindScale());
Vector3f[] translations = new Vector3f[]{t, t};
Quaternion[] rotations = new Quaternion[]{r, r};
Vector3f[] scales = new Vector3f[]{s, s};
int boneIndex = skeleton.getBoneIndex(bone);
BoneTrack track = new BoneTrack(boneIndex, times, translations, rotations, scales);
anim.addTrack(track);
}
}
}
anim = customContentManager.readExtensionAndExtras("animations", animation, anim);
if (skinIndex != -1) {
//we have a bone animation.
SkinData skin = fetchFromCache("skins", skinIndex, SkinData.class);
skin.animControl.addAnim(anim);
}
if (!spatials.isEmpty()) {
if (skinIndex != -1) {
//there are some spatial tracks in this bone animation... or the other way around. Let's add the spatials in the skinnedSpatials.
SkinData skin = fetchFromCache("skins", skinIndex, SkinData.class);
List<Spatial> spat = skinnedSpatials.get(skin);
spat.addAll(spatials);
//the animControl will be added in the setupControls();
} else {
//Spatial animation
Spatial spatial = null;
if (spatials.size() == 1) {
spatial = spatials.get(0);
} else {
spatial = findCommonAncestor(spatials);
}
AnimControl control = spatial.getControl(AnimControl.class);
if (control == null) {
control = new AnimControl();
spatial.addControl(control);
}
control.addAnim(anim);
}
}
}
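// Editor's illustration (field names per the glTF 2.0 animation schema, values
// hypothetical): a channel/sampler pair consumed by readAnimation() looks like
//   channel: { "sampler": 0, "target": { "node": 3, "path": "rotation" } }
//   sampler: { "input": 6, "output": 7, "interpolation": "LINEAR" }
// where "input" indexes an accessor of keyframe times and "output" an accessor of
// per-keyframe values (Vector3f for translation/scale, Quaternion for rotation).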
public Texture2D readSampler(int samplerIndex, Texture2D texture) throws IOException {
if (samplers == null) {
throw new AssetLoadException("No samplers defined");
}
JsonObject sampler = samplers.get(samplerIndex).getAsJsonObject();
Texture.MagFilter magFilter = getMagFilter(getAsInteger(sampler, "magFilter"));
Texture.MinFilter minFilter = getMinFilter(getAsInteger(sampler, "minFilter"));
Texture.WrapMode wrapS = getWrapMode(getAsInteger(sampler, "wrapS"));
Texture.WrapMode wrapT = getWrapMode(getAsInteger(sampler, "wrapT"));
if (magFilter != null) {
texture.setMagFilter(magFilter);
}
if (minFilter != null) {
texture.setMinFilter(minFilter);
}
texture.setWrap(Texture.WrapAxis.S, wrapS);
texture.setWrap(Texture.WrapAxis.T, wrapT);
texture = customContentManager.readExtensionAndExtras("texture.sampler", sampler, texture);
return texture;
}
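// Editor's note (numeric constants per the glTF 2.0 / OpenGL enums, not defined in
// this file): samplers encode filters and wrap modes numerically, for example
//   "magFilter": 9729 (LINEAR), "minFilter": 9987 (LINEAR_MIPMAP_LINEAR),
//   "wrapS": 10497 (REPEAT), "wrapT": 33071 (CLAMP_TO_EDGE)
// which the statically imported getMagFilter/getMinFilter/getWrapMode helpers map to
// the JME Texture settings applied above.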
public void readSkins() throws IOException {
if (skins == null) {
//no skins, no bone animation.
return;
}
List<JsonArray> allJoints = new ArrayList<>();
for (int index = 0; index < skins.size(); index++) {
JsonObject skin = skins.get(index).getAsJsonObject();
//Note that the "skeleton" index is intentionally ignored.
//It's not mandatory and exporters tend to mix up how it should be used because the spec is not clear.
//Anyway we have other means to detect both armature structures and root bones.
JsonArray joints = skin.getAsJsonArray("joints");
assertNotNull(joints, "No joints defined for skin");
int idx = allJoints.indexOf(joints);
if (idx >= 0) {
//skin already exists, so just reuse the cached SkinData for this index
SkinData sd = fetchFromCache("skins", idx, SkinData.class);
addToCache("skins", index, sd, nodes.size());
continue;
} else {
allJoints.add(joints);
}
//These inverse bind matrices, once inverted again, give us the real bind pose of the bones (in model space),
//since the skeleton is not guaranteed to be exported in bind pose.
Integer matricesIndex = getAsInteger(skin, "inverseBindMatrices");
Matrix4f[] inverseBindMatrices = null;
if (matricesIndex != null) {
inverseBindMatrices = readAccessorData(matricesIndex, matrix4fArrayPopulator);
} else {
inverseBindMatrices = new Matrix4f[joints.size()];
for (int i = 0; i < inverseBindMatrices.length; i++) {
inverseBindMatrices[i] = new Matrix4f();
}
}
Bone[] bones = new Bone[joints.size()];
for (int i = 0; i < joints.size(); i++) {
int boneIndex = joints.get(i).getAsInt();
//we don't need the inverse bind matrix, we need the bind matrix so let's invert it.
Matrix4f modelBindMatrix = inverseBindMatrices[i].invertLocal();
bones[i] = readNodeAsBone(boneIndex, i, index, modelBindMatrix);
}
for (int i = 0; i < joints.size(); i++) {
findChildren(joints.get(i).getAsInt());
}
Skeleton skeleton = new Skeleton(bones);
//Compute bind transforms. We need to do it from the root bones down to the leaf bones.
for (Bone bone : skeleton.getRoots()) {
BoneWrapper bw = findBoneWrapper(bone);
computeBindTransforms(bw, skeleton);
}
skeleton = customContentManager.readExtensionAndExtras("skin", skin, skeleton);
SkinData skinData = new SkinData();
skinData.bones = bones;
skinData.skeletonControl = new SkeletonControl(skeleton);
skinData.animControl = new AnimControl(skinData.skeletonControl.getSkeleton());
addToCache("skins", index, skinData, nodes.size());
skinnedSpatials.put(skinData, new ArrayList<Spatial>());
// Set local transforms.
// The skeleton may come in a given pose that is not the rest pose, so let's apply it.
// We will need it later for animation
for (int i = 0; i < joints.size(); i++) {
applyPose(joints.get(i).getAsInt());
}
skeleton.updateWorldVectors();
//If the user didn't ask to keep the pose we reset the skeleton user control
if (!isKeepSkeletonPose(info)) {
for (Bone bone : bones) {
bone.setUserControl(false);
}
}
}
}
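// Editor's sketch (illustrative, not part of the original source): the bind-pose
// relation used by readSkins(). For joint i the loader computes
//   Matrix4f modelBind = inverseBindMatrices[i].invert(); // bind pose of bone i in model space
// falling back to identity matrices when the skin provides no "inverseBindMatrices"
// accessor, exactly as in the loop above.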
private void applyPose(int index) {
BoneWrapper bw = fetchFromCache("nodes", index, BoneWrapper.class);
bw.bone.setUserControl(true);
bw.bone.setLocalTranslation(bw.localTransform.getTranslation());
bw.bone.setLocalRotation(bw.localTransform.getRotation());
bw.bone.setLocalScale(bw.localTransform.getScale());
}
private void computeBindTransforms(BoneWrapper boneWrapper, Skeleton skeleton) {
Bone bone = boneWrapper.bone;
tmpTransforms.fromTransformMatrix(boneWrapper.modelBindMatrix);
if (bone.getParent() != null) {
//for a root bone the model transforms are the same as the local transforms,
//but for child bones we need to combine them with the parent's inverse model transforms.
tmpMat.setTranslation(bone.getParent().getModelSpacePosition());
tmpMat.setRotationQuaternion(bone.getParent().getModelSpaceRotation());
tmpMat.setScale(bone.getParent().getModelSpaceScale());
tmpMat.invertLocal();
tmpTransforms2.fromTransformMatrix(tmpMat);
tmpTransforms.combineWithParent(tmpTransforms2);
}
bone.setBindTransforms(tmpTransforms.getTranslation(), tmpTransforms.getRotation(), tmpTransforms.getScale());
//resets the local transforms to bind transforms for all bones.
//then computes the model transforms from local transforms for each bone.
skeleton.resetAndUpdate();
skeleton.setBindingPose();
for (Integer childIndex : boneWrapper.children) {
BoneWrapper child = fetchFromCache("nodes", childIndex, BoneWrapper.class);
computeBindTransforms(child, skeleton);
}
}
private BoneWrapper findBoneWrapper(Bone bone) {
for (int i = 0; i < nodes.size(); i++) {
BoneWrapper bw = fetchFromCache("nodes", i, BoneWrapper.class);
if (bw != null && bw.bone == bone) {
return bw;
}
}
return null;
}
public Bone readNodeAsBone(int nodeIndex, int boneIndex, int skinIndex, Matrix4f modelBindMatrix) throws IOException {
BoneWrapper boneWrapper = fetchFromCache("nodes", nodeIndex, BoneWrapper.class);
if (boneWrapper != null) {
return boneWrapper.bone;
}
JsonObject nodeData = nodes.get(nodeIndex).getAsJsonObject();
String name = getAsString(nodeData, "name");
if (name == null) {
name = "Bone_" + nodeIndex;
}
Bone bone = new Bone(name);
Transform boneTransforms = null;
boneTransforms = readTransforms(nodeData);
addToCache("nodes", nodeIndex, new BoneWrapper(bone, boneIndex, skinIndex, modelBindMatrix, boneTransforms), nodes.size());
return bone;
}
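//Wires up the children of a bone node: children that are bones become child bones, while any other child subgraph is loaded as a Spatial and kept aside so it can later be attached to the bone's attachment node.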
private void findChildren(int nodeIndex) throws IOException {
BoneWrapper bw = fetchFromCache("nodes", nodeIndex, BoneWrapper.class);
JsonObject nodeData = nodes.get(nodeIndex).getAsJsonObject();
JsonArray children = nodeData.getAsJsonArray("children");
if (children != null) {
for (JsonElement child : children) {
int childIndex = child.getAsInt();
if (bw.children.contains(childIndex)) {
//bone already has the child in its children
continue;
}
BoneWrapper cbw = fetchFromCache("nodes", childIndex, BoneWrapper.class);
if (cbw != null) {
bw.bone.addChild(cbw.bone);
bw.children.add(childIndex);
} else {
//The child might be a Node
//Creating a dummy node to read the subgraph
Node n = new Node();
readChild(n, child);
Spatial s = n.getChild(0);
//removing the spatial from the dummy node, it will be attached to the attachment node of the bone
s.removeFromParent();
bw.attachedSpatial = s;
}
}
}
}
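//Attaches each skin's AnimControl and SkeletonControl to the common ancestor of the spatials it affects, and attaches the spatials found under bone nodes to the corresponding attachment nodes.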
private void setupControls() {
for (SkinData skinData : skinnedSpatials.keySet()) {
List<Spatial> spatials = skinnedSpatials.get(skinData);
Spatial spatial = skinData.parent;
if (spatials.isEmpty()) {
continue;
}
if (spatials.size() >= 1) {
spatial = findCommonAncestor(spatials);
}
if (skinData.parent != null && spatial != skinData.parent) {
skinData.rootBoneTransformOffset = spatial.getWorldTransform().invert();
skinData.rootBoneTransformOffset.combineWithParent(skinData.parent.getWorldTransform());
}
if (skinData.animControl != null && skinData.animControl.getSpatial() == null) {
spatial.addControl(skinData.animControl);
}
spatial.addControl(skinData.skeletonControl);
}
for (int i = 0; i < nodes.size(); i++) {
BoneWrapper bw = fetchFromCache("nodes", i, BoneWrapper.class);
if (bw == null || bw.attachedSpatial == null) {
continue;
}
SkinData skinData = fetchFromCache("skins", bw.skinIndex, SkinData.class);
skinData.skeletonControl.getAttachmentsNode(bw.bone.getName()).attachChild(bw.attachedSpatial);
}
}
private String readMeshName(int meshIndex) {
JsonObject meshData = meshes.get(meshIndex).getAsJsonObject();
return getAsString(meshData, "name");
}
public <T> T fetchFromCache(String name, int index, Class<T> type) {
Object[] data = dataCache.get(name);
if (data == null) {
return null;
}
try {
T ret = type.cast(data[index]);
return ret;
} catch (ClassCastException e) {
return null;
}
}
public void addToCache(String name, int index, Object object, int maxLength) {
Object[] data = dataCache.get(name);
if (data == null) {
data = new Object[maxLength];
dataCache.put(name, data);
}
data[index] = object;
}
public AssetInfo getInfo() {
return info;
}
public JsonObject getDocRoot() {
return docRoot;
}
public Node getRootNode() {
return rootNode;
}
public static class WeightData {
float value;
short index;
int componentSize;
public WeightData(float value, short index, int componentSize) {
this.value = value;
this.index = index;
this.componentSize = componentSize;
}
}
private class BoneWrapper {
Bone bone;
int boneIndex;
int skinIndex;
Transform localTransform;
Transform localTransformOffset;
Matrix4f modelBindMatrix;
boolean isRoot = false;
boolean localUpdated = false;
Spatial attachedSpatial;
List<Integer> children = new ArrayList<>();
public BoneWrapper(Bone bone, int boneIndex, int skinIndex, Matrix4f modelBindMatrix, Transform localTransform) {
this.bone = bone;
this.boneIndex = boneIndex;
this.skinIndex = skinIndex;
this.modelBindMatrix = modelBindMatrix;
this.localTransform = localTransform;
this.localTransformOffset = localTransform.clone();
}
/**
* Applies the inverse bind transforms to the animation data, and the armature transforms if relevant.
*/
public void update(TrackData data) {
Transform bindTransforms = new Transform(bone.getBindPosition(), bone.getBindRotation(), bone.getBindScale());
SkinData skinData = fetchFromCache("skins", skinIndex, SkinData.class);
if (!localUpdated) {
//The local transform of the bone is the default pose to use for animations when there is no track.
//We need to transform it so that JME can use it in blendAnimTransforms.
reverseBlendAnimTransforms(localTransformOffset, bindTransforms);
localUpdated = true;
}
for (int i = 0; i < data.getNbKeyFrames(); i++) {
Vector3f translation = getTranslation(data, i);
Quaternion rotation = getRotation(data, i);
Vector3f scale = getScale(data, i);
Transform t = new Transform(translation, rotation, scale);
if (isRoot && skinData.rootBoneTransformOffset != null) {
//Apply the armature transforms to the root bone anim track.
t.combineWithParent(skinData.rootBoneTransformOffset);
}
reverseBlendAnimTransforms(t, bindTransforms);
if (data.translations != null) {
data.translations[i] = t.getTranslation();
}
if (data.rotations != null) {
data.rotations[i] = t.getRotation();
}
if (data.scales != null) {
data.scales[i] = t.getScale();
}
}
data.ensureTranslationRotations(localTransformOffset);
}
private void reverseBlendAnimTransforms(Transform t, Transform bindTransforms) {
//This is wrong
//You'd normally combine those transforms with transform.combineWithParent()
//Here we actually do in reverse what JME does to combine anim transforms with bind transforms (add trans/mult rot/ mult scale)
//The code to fix is in Bone.blendAnimTransforms
//TODO fix blendAnimTransforms
t.getTranslation().subtractLocal(bindTransforms.getTranslation());
t.getScale().divideLocal(bindTransforms.getScale());
tmpQuat.set(bindTransforms.getRotation()).inverseLocal().multLocal(t.getRotation());
t.setRotation(tmpQuat);
}
private Vector3f getTranslation(TrackData data, int i) {
Vector3f translation;
if (data.translations == null) {
translation = bone.getLocalPosition();
} else {
translation = data.translations[i];
}
return translation;
}
private Quaternion getRotation(TrackData data, int i) {
Quaternion rotation;
if (data.rotations == null) {
rotation = bone.getLocalRotation();
} else {
rotation = data.rotations[i];
}
return rotation;
}
private Vector3f getScale(TrackData data, int i) {
Vector3f scale;
if (data.scales == null) {
scale = bone.getLocalScale();
} else {
scale = data.scales[i];
}
return scale;
}
}
private class SkinData {
SkeletonControl skeletonControl;
AnimControl animControl;
Spatial parent;
Transform rootBoneTransformOffset;
Bone[] bones;
boolean used = false;
}
public static class SkinBuffers {
short[] joints;
float[] weights;
int componentSize;
public SkinBuffers(short[] joints, int componentSize) {
this.joints = joints;
this.componentSize = componentSize;
}
public SkinBuffers() {
}
}
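//Populators convert the raw bytes referenced by a gltf accessor into the typed data the loader needs (vertex buffers, float/Vector3f/Quaternion/Matrix4f arrays or skinning buffers).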
private interface Populator<T> {
T populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException;
}
private class VertexBufferPopulator implements Populator<VertexBuffer> {
VertexBuffer.Type bufferType;
public VertexBufferPopulator(VertexBuffer.Type bufferType) {
this.bufferType = bufferType;
}
@Override
public VertexBuffer populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
if (bufferType == null) {
logger.log(Level.WARNING, "could not assign data to any VertexBuffer type for buffer view " + bufferViewIndex);
return null;
}
VertexBuffer vb = new VertexBuffer(bufferType);
VertexBuffer.Format format = getVertexBufferFormat(componentType);
VertexBuffer.Format originalFormat = format;
if (normalized) {
//Some float data can be packed into short buffers, "normalized" means they have to be unpacked.
//In that case the buffer is a FloatBuffer
format = VertexBuffer.Format.Float;
}
int numComponents = getNumberOfComponents(type);
Buffer buff = VertexBuffer.createBuffer(format, numComponents, count);
int bufferSize = numComponents * count;
if (bufferViewIndex == null) {
//no referenced buffer, the spec says to pad the buffer with zeros.
padBuffer(buff, bufferSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, buff, numComponents, originalFormat);
}
if (bufferType == VertexBuffer.Type.Index) {
numComponents = 3;
}
vb.setupData(VertexBuffer.Usage.Dynamic, numComponents, format, buff);
return vb;
}
}
private class FloatArrayPopulator implements Populator<float[]> {
@Override
public float[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
float[] data = new float[dataSize];
if (bufferViewIndex == null) {
//no referenced buffer, the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class Vector3fArrayPopulator implements Populator<Vector3f[]> {
@Override
public Vector3f[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
Vector3f[] data = new Vector3f[count];
if (bufferViewIndex == null) {
//no referenced buffer, the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class QuaternionArrayPopulator implements Populator<Quaternion[]> {
@Override
public Quaternion[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
Quaternion[] data = new Quaternion[count];
if (bufferViewIndex == null) {
//no referenced buffer, the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class Matrix4fArrayPopulator implements Populator<Matrix4f[]> {
@Override
public Matrix4f[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
Matrix4f[] data = new Matrix4f[count];
if (bufferViewIndex == null) {
//no referenced buffer, the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class JointArrayPopulator implements Populator<SkinBuffers> {
@Override
public SkinBuffers populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
//can be bytes or shorts.
VertexBuffer.Format format = VertexBuffer.Format.Byte;
if (componentType == 5123) {
format = VertexBuffer.Format.Short;
}
int dataSize = numComponents * count;
short[] data = new short[dataSize];
if (bufferViewIndex == null) {
//no referenced buffer, the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, format);
}
return new SkinBuffers(data, format.getComponentSize());
}
}
}
|
jme3-plugins/src/gltf/java/com/jme3/scene/plugins/gltf/GltfLoader.java
|
package com.jme3.scene.plugins.gltf;
import com.google.gson.*;
import com.google.gson.stream.JsonReader;
import com.jme3.animation.*;
import com.jme3.asset.*;
import com.jme3.material.Material;
import com.jme3.material.RenderState;
import com.jme3.math.*;
import com.jme3.renderer.Camera;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.scene.*;
import com.jme3.scene.control.CameraControl;
import com.jme3.texture.Texture;
import com.jme3.texture.Texture2D;
import com.jme3.util.IntMap;
import com.jme3.util.mikktspace.MikktspaceTangentGenerator;
import javax.xml.bind.DatatypeConverter;
import java.io.*;
import java.nio.Buffer;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import static com.jme3.scene.plugins.gltf.GltfUtils.*;
/**
* GLTF 2.0 loader
* Created by Nehon on 07/08/2017.
*/
public class GltfLoader implements AssetLoader {
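//Usage sketch (an assumption for illustration, not part of this loader): once the loader is registered with the AssetManager for the gltf extension, a model can be loaded with something like
//  Spatial model = assetManager.loadModel("Models/MyModel/scene.gltf");
//The asset path above is hypothetical.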
private static final Logger logger = Logger.getLogger(GltfLoader.class.getName());
//Data cache for already parsed JME objects
private Map<String, Object[]> dataCache = new HashMap<>();
private JsonArray scenes;
private JsonArray nodes;
private JsonArray meshes;
private JsonArray accessors;
private JsonArray bufferViews;
private JsonArray buffers;
private JsonArray materials;
private JsonArray textures;
private JsonArray images;
private JsonArray samplers;
private JsonArray animations;
private JsonArray skins;
private JsonArray cameras;
private Material defaultMat;
private AssetInfo info;
private JsonObject docRoot;
private Node rootNode;
private FloatArrayPopulator floatArrayPopulator = new FloatArrayPopulator();
private Vector3fArrayPopulator vector3fArrayPopulator = new Vector3fArrayPopulator();
private QuaternionArrayPopulator quaternionArrayPopulator = new QuaternionArrayPopulator();
private Matrix4fArrayPopulator matrix4fArrayPopulator = new Matrix4fArrayPopulator();
private static Map<String, MaterialAdapter> defaultMaterialAdapters = new HashMap<>();
private CustomContentManager customContentManager = new CustomContentManager();
private boolean useNormalsFlag = false;
private Quaternion tmpQuat = new Quaternion();
private Transform tmpTransforms = new Transform();
private Transform tmpTransforms2 = new Transform();
private Matrix4f tmpMat = new Matrix4f();
Map<SkinData, List<Spatial>> skinnedSpatials = new HashMap<>();
IntMap<SkinBuffers> skinBuffers = new IntMap<>();
static {
defaultMaterialAdapters.put("pbrMetallicRoughness", new PBRMetalRoughMaterialAdapter());
}
@Override
public Object load(AssetInfo assetInfo) throws IOException {
return loadFromStream(assetInfo, assetInfo.openStream());
}
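//Parses the whole gltf JSON document: skins and cameras are read first, then the scenes and their node graphs, the skinning controls are set up, and finally the animations are loaded.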
protected Object loadFromStream(AssetInfo assetInfo, InputStream stream) throws IOException {
try {
dataCache.clear();
info = assetInfo;
skinnedSpatials.clear();
rootNode = new Node();
if (defaultMat == null) {
defaultMat = new Material(assetInfo.getManager(), "Common/MatDefs/Light/PBRLighting.j3md");
defaultMat.setColor("BaseColor", ColorRGBA.White);
defaultMat.setFloat("Metallic", 0f);
defaultMat.setFloat("Roughness", 1f);
}
docRoot = new JsonParser().parse(new JsonReader(new InputStreamReader(stream))).getAsJsonObject();
JsonObject asset = docRoot.getAsJsonObject().get("asset").getAsJsonObject();
String generator = getAsString(asset, "generator");
String version = getAsString(asset, "version");
String minVersion = getAsString(asset, "minVersion");
if (!isSupported(version, minVersion)) {
logger.log(Level.SEVERE, "Gltf Loader doesn't support this gltf version: " + version + (minVersion != null ? ("/" + minVersion) : ""));
}
scenes = docRoot.getAsJsonArray("scenes");
nodes = docRoot.getAsJsonArray("nodes");
meshes = docRoot.getAsJsonArray("meshes");
accessors = docRoot.getAsJsonArray("accessors");
bufferViews = docRoot.getAsJsonArray("bufferViews");
buffers = docRoot.getAsJsonArray("buffers");
materials = docRoot.getAsJsonArray("materials");
textures = docRoot.getAsJsonArray("textures");
images = docRoot.getAsJsonArray("images");
samplers = docRoot.getAsJsonArray("samplers");
animations = docRoot.getAsJsonArray("animations");
skins = docRoot.getAsJsonArray("skins");
cameras = docRoot.getAsJsonArray("cameras");
customContentManager.init(this);
readSkins();
readCameras();
JsonPrimitive defaultScene = docRoot.getAsJsonPrimitive("scene");
readScenes(defaultScene, rootNode);
rootNode = customContentManager.readExtensionAndExtras("root", docRoot, rootNode);
setupControls();
//Loading animations
if (animations != null) {
for (int i = 0; i < animations.size(); i++) {
readAnimation(i);
}
}
//only one scene; let's not return the root.
if (rootNode.getChildren().size() == 1) {
rootNode = (Node) rootNode.getChild(0);
}
//no name for the scene... let's set the file name.
if (rootNode.getName() == null) {
rootNode.setName(assetInfo.getKey().getName());
}
return rootNode;
} catch (Exception e) {
throw new AssetLoadException("An error occurred loading " + assetInfo.getKey().getName(), e);
} finally {
stream.close();
}
}
private void setDefaultParams(Material mat) {
mat.setColor("BaseColor", ColorRGBA.White);
mat.setFloat("Metallic", 0f);
mat.setFloat("Roughness", 1f);
}
private boolean isSupported(String version, String minVersion) {
return "2.0".equals(version);
}
public void readScenes(JsonPrimitive defaultScene, Node rootNode) throws IOException {
if (scenes == null) {
//no scene... let's handle this later...
throw new AssetLoadException("Gltf files with no scene are not yet supported");
}
for (JsonElement scene : scenes) {
Node sceneNode = new Node();
//The spec says that only the default scene should be rendered;
// if there are several scenes, they are attached to the rootScene, but they are culled
sceneNode.setCullHint(Spatial.CullHint.Always);
sceneNode.setName(getAsString(scene.getAsJsonObject(), "name"));
JsonArray sceneNodes = scene.getAsJsonObject().getAsJsonArray("nodes");
sceneNode = customContentManager.readExtensionAndExtras("scene", scene, sceneNode);
rootNode.attachChild(sceneNode);
for (JsonElement node : sceneNodes) {
readChild(sceneNode, node);
}
}
//Setting the default scene cull hint to inherit.
int activeChild = 0;
if (defaultScene != null) {
activeChild = defaultScene.getAsInt();
}
rootNode.getChild(activeChild).setCullHint(Spatial.CullHint.Inherit);
}
public Object readNode(int nodeIndex) throws IOException {
Object obj = fetchFromCache("nodes", nodeIndex, Object.class);
if (obj != null) {
if (obj instanceof BoneWrapper) {
//the node can be a previously loaded bone; let's return it
return obj;
} else {
//If a spatial is referenced several times, it may be attached to different parents,
// which is not possible in JME, so we have to clone it.
return ((Spatial) obj).clone();
}
}
Spatial spatial;
JsonObject nodeData = nodes.get(nodeIndex).getAsJsonObject();
JsonArray children = nodeData.getAsJsonArray("children");
Integer meshIndex = getAsInteger(nodeData, "mesh");
if (meshIndex != null) {
assertNotNull(meshes, "Can't find any mesh data, yet a node references a mesh");
//there is a mesh in this node, however gltf can split meshes into primitives (a kind of sub-mesh),
//We don't have this in JME so we have to make one Mesh and one Geometry for each primitive.
Geometry[] primitives = readMeshPrimitives(meshIndex);
if (primitives.length == 1 && children == null) {
//only one geometry, let's not wrap it in another node unless the node has children.
spatial = primitives[0];
} else {
//several geometries, let's make a parent Node and attach them to it
Node node = new Node();
for (Geometry primitive : primitives) {
node.attachChild(primitive);
}
spatial = node;
}
spatial.setName(readMeshName(meshIndex));
} else {
//no mesh, we have a node. Can be a camera node or a regular node.
Integer camIndex = getAsInteger(nodeData, "camera");
if (camIndex != null) {
Camera cam = fetchFromCache("cameras", camIndex, Camera.class);
CameraNode node = new CameraNode(null, cam);
node.setControlDir(CameraControl.ControlDirection.SpatialToCamera);
spatial = node;
} else {
Node node = new Node();
spatial = node;
}
}
Integer skinIndex = getAsInteger(nodeData, "skin");
if (skinIndex != null) {
SkinData skinData = fetchFromCache("skins", skinIndex, SkinData.class);
List<Spatial> spatials = skinnedSpatials.get(skinData);
spatials.add(spatial);
skinData.used = true;
}
spatial.setLocalTransform(readTransforms(nodeData));
if (spatial.getName() == null) {
spatial.setName(getAsString(nodeData.getAsJsonObject(), "name"));
}
spatial = customContentManager.readExtensionAndExtras("node", nodeData, spatial);
addToCache("nodes", nodeIndex, spatial, nodes.size());
return spatial;
}
private void readChild(Spatial parent, JsonElement nodeIndex) throws IOException {
Object loaded = readNode(nodeIndex.getAsInt());
if (loaded instanceof Spatial) {
Spatial spatial = ((Spatial) loaded);
((Node) parent).attachChild(spatial);
JsonObject nodeElem = nodes.get(nodeIndex.getAsInt()).getAsJsonObject();
JsonArray children = nodeElem.getAsJsonArray("children");
if (children != null) {
for (JsonElement child : children) {
readChild(spatial, child);
}
}
} else if (loaded instanceof BoneWrapper) {
//parent is the Armature Node, we have to apply its transforms to the root bone's animation data
BoneWrapper bw = (BoneWrapper) loaded;
bw.isRoot = true;
SkinData skinData = fetchFromCache("skins", bw.skinIndex, SkinData.class);
if (skinData == null) {
return;
}
skinData.parent = parent;
}
}
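//Reads the node transforms either from a "matrix" entry or from separate "translation"/"rotation"/"scale" entries; returns the identity transform when none is present.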
public Transform readTransforms(JsonObject nodeData) {
Transform transform = new Transform();
JsonArray matrix = nodeData.getAsJsonArray("matrix");
if (matrix != null) {
//transforms are given as a mat4
float[] tmpArray = new float[16];
for (int i = 0; i < tmpArray.length; i++) {
tmpArray[i] = matrix.get(i).getAsFloat();
}
//creates a row major matrix from column major data
Matrix4f mat = new Matrix4f(tmpArray);
transform.fromTransformMatrix(mat);
return transform;
}
//no matrix transforms: no transforms, or transforms given as translation/rotation/scale
JsonArray translation = nodeData.getAsJsonArray("translation");
if (translation != null) {
transform.setTranslation(
translation.get(0).getAsFloat(),
translation.get(1).getAsFloat(),
translation.get(2).getAsFloat());
}
JsonArray rotation = nodeData.getAsJsonArray("rotation");
if (rotation != null) {
transform.setRotation(new Quaternion(
rotation.get(0).getAsFloat(),
rotation.get(1).getAsFloat(),
rotation.get(2).getAsFloat(),
rotation.get(3).getAsFloat()));
}
JsonArray scale = nodeData.getAsJsonArray("scale");
if (scale != null) {
transform.setScale(
scale.get(0).getAsFloat(),
scale.get(1).getAsFloat(),
scale.get(2).getAsFloat());
}
return transform;
}
public Geometry[] readMeshPrimitives(int meshIndex) throws IOException {
Geometry[] geomArray = (Geometry[]) fetchFromCache("meshes", meshIndex, Object.class);
if (geomArray != null) {
//cloning the geoms.
Geometry[] geoms = new Geometry[geomArray.length];
for (int i = 0; i < geoms.length; i++) {
geoms[i] = geomArray[i].clone(false);
}
return geoms;
}
JsonObject meshData = meshes.get(meshIndex).getAsJsonObject();
JsonArray primitives = meshData.getAsJsonArray("primitives");
assertNotNull(primitives, "Can't find any primitives in mesh " + meshIndex);
String name = getAsString(meshData, "name");
geomArray = new Geometry[primitives.size()];
int index = 0;
for (JsonElement primitive : primitives) {
JsonObject meshObject = primitive.getAsJsonObject();
Mesh mesh = new Mesh();
Integer mode = getAsInteger(meshObject, "mode");
mesh.setMode(getMeshMode(mode));
Integer indices = getAsInteger(meshObject, "indices");
if (indices != null) {
mesh.setBuffer(readAccessorData(indices, new VertexBufferPopulator(VertexBuffer.Type.Index)));
}
JsonObject attributes = meshObject.getAsJsonObject("attributes");
assertNotNull(attributes, "No attributes defined for mesh " + mesh);
skinBuffers.clear();
for (Map.Entry<String, JsonElement> entry : attributes.entrySet()) {
//special case for joints and weights buffer. If there are more than 4 bones per vertex, there might be several of them
//we need to read them all and keep only the 4 that have the most weight on the vertex.
String bufferType = entry.getKey();
if (bufferType.startsWith("JOINTS")) {
SkinBuffers buffs = getSkinBuffers(bufferType);
SkinBuffers buffer = readAccessorData(entry.getValue().getAsInt(), new JointArrayPopulator());
buffs.joints = buffer.joints;
buffs.componentSize = buffer.componentSize;
} else if (bufferType.startsWith("WEIGHTS")) {
SkinBuffers buffs = getSkinBuffers(bufferType);
buffs.weights = readAccessorData(entry.getValue().getAsInt(), new FloatArrayPopulator());
} else {
VertexBuffer vb = readAccessorData(entry.getValue().getAsInt(), new VertexBufferPopulator(getVertexBufferType(bufferType)));
if (vb != null) {
mesh.setBuffer(vb);
}
}
}
handleSkinningBuffers(mesh, skinBuffers);
if (mesh.getBuffer(VertexBuffer.Type.BoneIndex) != null) {
//the mesh has some skinning; let's create the buffers needed for HW skinning
//creating empty buffers for HW skinning
//the buffers will be set up if ever used.
VertexBuffer weightsHW = new VertexBuffer(VertexBuffer.Type.HWBoneWeight);
VertexBuffer indicesHW = new VertexBuffer(VertexBuffer.Type.HWBoneIndex);
//setting usage to CpuOnly so that the buffer is not sent empty to the GPU
indicesHW.setUsage(VertexBuffer.Usage.CpuOnly);
weightsHW.setUsage(VertexBuffer.Usage.CpuOnly);
mesh.setBuffer(weightsHW);
mesh.setBuffer(indicesHW);
mesh.generateBindPose();
}
mesh = customContentManager.readExtensionAndExtras("primitive", meshObject, mesh);
Geometry geom = new Geometry(null, mesh);
Integer materialIndex = getAsInteger(meshObject, "material");
if (materialIndex == null) {
geom.setMaterial(defaultMat);
} else {
useNormalsFlag = false;
geom.setMaterial(readMaterial(materialIndex));
if (geom.getMaterial().getAdditionalRenderState().getBlendMode() == RenderState.BlendMode.Alpha) {
//Alpha blending is enabled on this material; let's place the geom in the transparent bucket
geom.setQueueBucket(RenderQueue.Bucket.Transparent);
}
if (useNormalsFlag && mesh.getBuffer(VertexBuffer.Type.Tangent) == null) {
//No tangent buffer, but there is a normal map; we have to generate tangents using MikktSpace
MikktspaceTangentGenerator.generate(geom);
}
}
if (name != null) {
geom.setName(name + (primitives.size() > 1 ? ("_" + index) : ""));
}
geom.updateModelBound();
geomArray[index] = geom;
index++;
//TODO targets(morph anim...)
}
geomArray = customContentManager.readExtensionAndExtras("mesh", meshData, geomArray);
addToCache("meshes", meshIndex, geomArray, meshes.size());
return geomArray;
}
private SkinBuffers getSkinBuffers(String bufferType) {
int bufIndex = getIndex(bufferType);
SkinBuffers buffs = skinBuffers.get(bufIndex);
if (buffs == null) {
buffs = new SkinBuffers();
skinBuffers.put(bufIndex, buffs);
}
return buffs;
}
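//Reads the data referenced by an accessor and converts it with the given populator; the accessor provides the component type, element type, count, byte offset and (optionally) the bufferView to read from.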
public <R> R readAccessorData(int accessorIndex, Populator<R> populator) throws IOException {
assertNotNull(accessors, "No accessor attribute in the gltf file");
JsonObject accessor = accessors.get(accessorIndex).getAsJsonObject();
Integer bufferViewIndex = getAsInteger(accessor, "bufferView");
int byteOffset = getAsInteger(accessor, "byteOffset", 0);
Integer componentType = getAsInteger(accessor, "componentType");
assertNotNull(componentType, "No component type defined for accessor " + accessorIndex);
Integer count = getAsInteger(accessor, "count");
assertNotNull(count, "No count attribute defined for accessor " + accessorIndex);
String type = getAsString(accessor, "type");
assertNotNull(type, "No type attribute defined for accessor " + accessorIndex);
boolean normalized = getAsBoolean(accessor, "normalized", false);
//TODO min / max...don't know what to do about them.
//TODO sparse
R data = populator.populate(bufferViewIndex, componentType, type, count, byteOffset, normalized);
data = customContentManager.readExtensionAndExtras("accessor", accessor, data);
return data;
}
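//Reads "count" elements from the given bufferView into "store", taking the bufferView's byte offset and byte stride into account on top of the accessor's own byte offset.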
public Object readBuffer(Integer bufferViewIndex, int byteOffset, int count, Object store, int numComponents, VertexBuffer.Format format) throws IOException {
JsonObject bufferView = bufferViews.get(bufferViewIndex).getAsJsonObject();
Integer bufferIndex = getAsInteger(bufferView, "buffer");
assertNotNull(bufferIndex, "No buffer defined for bufferView " + bufferViewIndex);
int bvByteOffset = getAsInteger(bufferView, "byteOffset", 0);
Integer byteLength = getAsInteger(bufferView, "byteLength");
assertNotNull(byteLength, "No byte length defined for bufferView " + bufferViewIndex);
int byteStride = getAsInteger(bufferView, "byteStride", 0);
//target defines ELEMENT_ARRAY_BUFFER or ARRAY_BUFFER, but we already know that, since we know whether we are loading the index buffer or another buffer...
//not sure it's useful for us, but I guess it's useful when you map data directly to the GPU.
//int target = getAsInteger(bufferView, "target", 0);
byte[] data = readData(bufferIndex);
data = customContentManager.readExtensionAndExtras("bufferView", bufferView, data);
if (store == null) {
store = new byte[byteLength];
}
if (count == -1) {
count = byteLength;
}
populateBuffer(store, data, count, byteOffset + bvByteOffset, byteStride, numComponents, format);
return store;
}
public byte[] readData(int bufferIndex) throws IOException {
assertNotNull(buffers, "No buffer defined");
JsonObject buffer = buffers.get(bufferIndex).getAsJsonObject();
String uri = getAsString(buffer, "uri");
Integer bufferLength = getAsInteger(buffer, "byteLength");
assertNotNull(bufferLength, "No byteLength defined for buffer " + bufferIndex);
byte[] data = (byte[]) fetchFromCache("buffers", bufferIndex, Object.class);
if (data != null) {
return data;
}
data = getBytes(bufferIndex, uri, bufferLength);
data = customContentManager.readExtensionAndExtras("buffer", buffer, data);
addToCache("buffers", bufferIndex, data, buffers.size());
return data;
}
protected byte[] getBytes(int bufferIndex, String uri, Integer bufferLength) throws IOException {
byte[] data;
if (uri != null) {
if (uri.startsWith("data:")) {
//base64 embedded data
data = DatatypeConverter.parseBase64Binary(uri.substring(uri.indexOf(",") + 1));
} else {
//external file; let's load it
if (!uri.endsWith(".bin")) {
throw new AssetLoadException("Cannot load " + uri + ", a .bin extension is required.");
}
BinDataKey key = new BinDataKey(info.getKey().getFolder() + uri);
InputStream input = (InputStream) info.getManager().loadAsset(key);
data = new byte[bufferLength];
DataInputStream dataStream = new DataInputStream(input);
dataStream.readFully(data);
dataStream.close();
}
} else {
//no URI; this should not happen in a gltf file, only in glb files.
throw new AssetLoadException("Buffer " + bufferIndex + " has no uri");
}
return data;
}
public Material readMaterial(int materialIndex) throws IOException {
assertNotNull(materials, "There is no material defined, yet a mesh references one");
JsonObject matData = materials.get(materialIndex).getAsJsonObject();
JsonObject pbrMat = matData.getAsJsonObject("pbrMetallicRoughness");
MaterialAdapter adapter = null;
if (pbrMat != null) {
adapter = getAdapterForMaterial(info, "pbrMetallicRoughness");
if (adapter == null) {
adapter = defaultMaterialAdapters.get("pbrMetallicRoughness");
}
adapter.init(info.getManager());
}
adapter = customContentManager.readExtensionAndExtras("material", matData, adapter);
if (adapter == null) {
logger.log(Level.WARNING, "Couldn't find any matching material definition for material " + materialIndex);
adapter = defaultMaterialAdapters.get("pbrMetallicRoughness");
adapter.init(info.getManager());
setDefaultParams(adapter.getMaterial());
}
if (pbrMat != null) {
adapter.setParam("baseColorFactor", getAsColor(pbrMat, "baseColorFactor", ColorRGBA.White));
adapter.setParam("metallicFactor", getAsFloat(pbrMat, "metallicFactor", 1f));
adapter.setParam("roughnessFactor", getAsFloat(pbrMat, "roughnessFactor", 1f));
adapter.setParam("baseColorTexture", readTexture(pbrMat.getAsJsonObject("baseColorTexture")));
adapter.setParam("metallicRoughnessTexture", readTexture(pbrMat.getAsJsonObject("metallicRoughnessTexture")));
}
adapter.getMaterial().setName(getAsString(matData, "name"));
adapter.setParam("emissiveFactor", getAsColor(matData, "emissiveFactor", ColorRGBA.Black));
String alphaMode = getAsString(matData, "alphaMode");
adapter.setParam("alphaMode", alphaMode);
if (alphaMode != null && alphaMode.equals("MASK")) {
adapter.setParam("alphaCutoff", getAsFloat(matData, "alphaCutoff"));
}
adapter.setParam("doubleSided", getAsBoolean(matData, "doubleSided"));
Texture2D normal = readTexture(matData.getAsJsonObject("normalTexture"));
adapter.setParam("normalTexture", normal);
if (normal != null) {
useNormalsFlag = true;
}
adapter.setParam("occlusionTexture", readTexture(matData.getAsJsonObject("occlusionTexture")));
adapter.setParam("emissiveTexture", readTexture(matData.getAsJsonObject("emissiveTexture")));
return adapter.getMaterial();
}
public void readCameras() throws IOException {
if (cameras == null) {
return;
}
for (int i = 0; i < cameras.size(); i++) {
//Can't access the resolution here... actually it's a shame we can't access settings from anywhere.
//Users will have to call resize on the camera.
Camera cam = new Camera(1, 1);
JsonObject camObj = cameras.get(i).getAsJsonObject();
String type = getAsString(camObj, "type");
assertNotNull(type, "No type defined for camera");
JsonObject camData = camObj.getAsJsonObject(type);
if (type.equals("perspective")) {
float aspectRatio = getAsFloat(camData, "aspectRatio", 1f);
Float yfov = getAsFloat(camData, "yfov");
assertNotNull(yfov, "No yfov for perspective camera");
Float znear = getAsFloat(camData, "znear");
assertNotNull(znear, "No znear for perspective camera");
Float zfar = getAsFloat(camData, "zfar", znear * 1000f);
cam.setFrustumPerspective(yfov * FastMath.RAD_TO_DEG, aspectRatio, znear, zfar);
cam = customContentManager.readExtensionAndExtras("camera.perspective", camData, cam);
} else {
Float xmag = getAsFloat(camData, "xmag");
assertNotNull(xmag, "No xmag for orthographic camera");
Float ymag = getAsFloat(camData, "ymag");
assertNotNull(ymag, "No ymag for orthographic camera");
Float znear = getAsFloat(camData, "znear");
assertNotNull(znear, "No znear for orthographic camera");
Float zfar = getAsFloat(camData, "zfar", znear * 1000f);
assertNotNull(zfar, "No zfar for orthographic camera");
cam.setParallelProjection(true);
cam.setFrustum(znear, zfar, -xmag, xmag, ymag, -ymag);
cam = customContentManager.readExtensionAndExtras("camera.orthographic", camData, cam);
}
cam = customContentManager.readExtensionAndExtras("camera", camObj, cam);
addToCache("cameras", i, cam, cameras.size());
}
}
public Texture2D readTexture(JsonObject texture) throws IOException {
return readTexture(texture, false);
}
public Texture2D readTexture(JsonObject texture, boolean flip) throws IOException {
if (texture == null) {
return null;
}
Integer textureIndex = getAsInteger(texture, "index");
assertNotNull(textureIndex, "Texture has no index");
assertNotNull(textures, "There are no textures, yet one is referenced by a material");
JsonObject textureData = textures.get(textureIndex).getAsJsonObject();
Integer sourceIndex = getAsInteger(textureData, "source");
Integer samplerIndex = getAsInteger(textureData, "sampler");
Texture2D texture2d = readImage(sourceIndex, flip);
if (samplerIndex != null) {
texture2d = readSampler(samplerIndex, texture2d);
} else {
texture2d.setWrap(Texture.WrapMode.Repeat);
}
texture2d = customContentManager.readExtensionAndExtras("texture", texture, texture2d);
return texture2d;
}
public Texture2D readImage(int sourceIndex, boolean flip) throws IOException {
if (images == null) {
throw new AssetLoadException("No image defined");
}
JsonObject image = images.get(sourceIndex).getAsJsonObject();
String uri = getAsString(image, "uri");
Integer bufferView = getAsInteger(image, "bufferView");
String mimeType = getAsString(image, "mimeType");
Texture2D result;
if (uri == null) {
assertNotNull(bufferView, "Image " + sourceIndex + " should either have a uri or a bufferView");
assertNotNull(mimeType, "Image " + sourceIndex + " should have a mimeType");
byte[] data = (byte[]) readBuffer(bufferView, 0, -1, null, 1, VertexBuffer.Format.Byte);
String extension = mimeType.split("/")[1];
TextureKey key = new TextureKey("image" + sourceIndex + "." + extension, flip);
result = (Texture2D) info.getManager().loadAssetFromStream(key, new ByteArrayInputStream(data));
} else if (uri.startsWith("data:")) {
//base64 encoded image
String[] uriInfo = uri.split(",");
byte[] data = DatatypeConverter.parseBase64Binary(uriInfo[1]);
String headerInfo = uriInfo[0].split(";")[0];
String extension = headerInfo.split("/")[1];
TextureKey key = new TextureKey("image" + sourceIndex + "." + extension, flip);
result = (Texture2D) info.getManager().loadAssetFromStream(key, new ByteArrayInputStream(data));
} else {
//external file image
TextureKey key = new TextureKey(info.getKey().getFolder() + uri, flip);
Texture tex = info.getManager().loadTexture(key);
result = (Texture2D) tex;
}
return result;
}
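//Reads one gltf animation: gathers per-node track data from the channels and samplers, builds SpatialTracks for spatial targets and BoneTracks for bone targets, and registers the resulting Animation on the relevant AnimControl.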
public void readAnimation(int animationIndex) throws IOException {
JsonObject animation = animations.get(animationIndex).getAsJsonObject();
JsonArray channels = animation.getAsJsonArray("channels");
JsonArray samplers = animation.getAsJsonArray("samplers");
String name = getAsString(animation, "name");
assertNotNull(channels, "No channels for animation " + name);
assertNotNull(samplers, "No samplers for animation " + name);
//temp data storage of track data
TrackData[] tracks = new TrackData[nodes.size()];
for (JsonElement channel : channels) {
JsonObject target = channel.getAsJsonObject().getAsJsonObject("target");
Integer targetNode = getAsInteger(target, "node");
String targetPath = getAsString(target, "path");
if (targetNode == null) {
//no target node for the channel, specs say to ignore the channel.
continue;
}
assertNotNull(targetPath, "No target path for channel");
if (targetPath.equals("weight")) {
//Morph animation, not implemented in JME, let's warn the user and skip the channel
logger.log(Level.WARNING, "Morph animation is not supported by JME yet, skipping animation");
continue;
}
TrackData trackData = tracks[targetNode];
if (trackData == null) {
trackData = new TrackData();
tracks[targetNode] = trackData;
}
Integer samplerIndex = getAsInteger(channel.getAsJsonObject(), "sampler");
assertNotNull(samplerIndex, "No animation sampler provided for channel");
JsonObject sampler = samplers.get(samplerIndex).getAsJsonObject();
Integer timeIndex = getAsInteger(sampler, "input");
assertNotNull(timeIndex, "No input accessor Provided for animation sampler");
Integer dataIndex = getAsInteger(sampler, "output");
assertNotNull(dataIndex, "No output accessor Provided for animation sampler");
String interpolation = getAsString(sampler, "interpolation");
if (interpolation == null || !interpolation.equals("LINEAR")) {
//JME anim system only supports Linear interpolation (will be possible with monkanim though)
//TODO rework this once monkanim is core, or allow a hook for animation loading to fit custom animation systems
logger.log(Level.WARNING, "JME only supports linear interpolation for animations");
}
trackData = customContentManager.readExtensionAndExtras("animation.sampler", sampler, trackData);
float[] times = fetchFromCache("accessors", timeIndex, float[].class);
if (times == null) {
times = readAccessorData(timeIndex, floatArrayPopulator);
addToCache("accessors", timeIndex, times, accessors.size());
}
if (targetPath.equals("translation")) {
trackData.timeArrays.add(new TrackData.TimeData(times, TrackData.Type.Translation));
Vector3f[] translations = readAccessorData(dataIndex, vector3fArrayPopulator);
trackData.translations = translations;
} else if (targetPath.equals("scale")) {
trackData.timeArrays.add(new TrackData.TimeData(times, TrackData.Type.Scale));
Vector3f[] scales = readAccessorData(dataIndex, vector3fArrayPopulator);
trackData.scales = scales;
} else if (targetPath.equals("rotation")) {
trackData.timeArrays.add(new TrackData.TimeData(times, TrackData.Type.Rotation));
Quaternion[] rotations = readAccessorData(dataIndex, quaternionArrayPopulator);
trackData.rotations = rotations;
} else {
//TODO support weights
logger.log(Level.WARNING, "Morph animation is not supported");
continue;
}
tracks[targetNode] = customContentManager.readExtensionAndExtras("channel", channel, trackData);
}
if (name == null) {
name = "anim_" + animationIndex;
}
List<Spatial> spatials = new ArrayList<>();
Animation anim = new Animation();
anim.setName(name);
int skinIndex = -1;
List<Bone> usedBones = new ArrayList<>();
for (int i = 0; i < tracks.length; i++) {
TrackData trackData = tracks[i];
if (trackData == null || trackData.timeArrays.isEmpty()) {
continue;
}
trackData.update();
if (trackData.length > anim.getLength()) {
anim.setLength(trackData.length);
}
Object node = fetchFromCache("nodes", i, Object.class);
if (node instanceof Spatial) {
Spatial s = (Spatial) node;
spatials.add(s);
SpatialTrack track = new SpatialTrack(trackData.times, trackData.translations, trackData.rotations, trackData.scales);
track.setTrackSpatial(s);
anim.addTrack(track);
} else if (node instanceof BoneWrapper) {
BoneWrapper b = (BoneWrapper) node;
//apply the inverseBindMatrix to animation data.
b.update(trackData);
usedBones.add(b.bone);
if (skinIndex == -1) {
skinIndex = b.skinIndex;
} else {
//Check that all bones affected by this animation are from the same skin; otherwise the track will be skipped.
if (skinIndex != b.skinIndex) {
logger.log(Level.WARNING, "Animation " + animationIndex + " (" + name + ") applies to bones that are not from the same skin: skin " + skinIndex + ", bone " + b.bone.getName() + " from skin " + b.skinIndex);
continue;
}
}
BoneTrack track = new BoneTrack(b.boneIndex, trackData.times, trackData.translations, trackData.rotations, trackData.scales);
anim.addTrack(track);
}
}
// Check each bone to see if its local pose is different from its bind pose.
// If it is, we ensure that the bone has an animation track, else JME's way of applying anim transforms will apply the bind pose to those bones,
// instead of the local pose that is supposed to be the default
if (skinIndex != -1) {
SkinData skin = fetchFromCache("skins", skinIndex, SkinData.class);
Skeleton skeleton = skin.skeletonControl.getSkeleton();
for (Bone bone : skin.bones) {
if (!usedBones.contains(bone) && !equalBindAndLocalTransforms(bone)) {
//create a track
float[] times = new float[]{0, anim.getLength()};
Vector3f t = bone.getLocalPosition().subtract(bone.getBindPosition());
Quaternion r = tmpQuat.set(bone.getBindRotation()).inverse().multLocal(bone.getLocalRotation());
Vector3f s = bone.getLocalScale().divide(bone.getBindScale());
Vector3f[] translations = new Vector3f[]{t, t};
Quaternion[] rotations = new Quaternion[]{r, r};
Vector3f[] scales = new Vector3f[]{s, s};
int boneIndex = skeleton.getBoneIndex(bone);
BoneTrack track = new BoneTrack(boneIndex, times, translations, rotations, scales);
anim.addTrack(track);
}
}
}
anim = customContentManager.readExtensionAndExtras("animations", animation, anim);
if (skinIndex != -1) {
//we have a bone animation.
SkinData skin = fetchFromCache("skins", skinIndex, SkinData.class);
skin.animControl.addAnim(anim);
}
if (!spatials.isEmpty()) {
if (skinIndex != -1) {
//there are some spatial tracks in this bone animation... or the other way around. Let's add the spatials to the skinnedSpatials.
SkinData skin = fetchFromCache("skins", skinIndex, SkinData.class);
List<Spatial> spat = skinnedSpatials.get(skin);
spat.addAll(spatials);
//the animControl will be added in setupControls().
} else {
//Spatial animation
Spatial spatial = null;
if (spatials.size() == 1) {
spatial = spatials.get(0);
} else {
spatial = findCommonAncestor(spatials);
}
AnimControl control = spatial.getControl(AnimControl.class);
if (control == null) {
control = new AnimControl();
spatial.addControl(control);
}
control.addAnim(anim);
}
}
}
public Texture2D readSampler(int samplerIndex, Texture2D texture) throws IOException {
if (samplers == null) {
throw new AssetLoadException("No samplers defined");
}
JsonObject sampler = samplers.get(samplerIndex).getAsJsonObject();
Texture.MagFilter magFilter = getMagFilter(getAsInteger(sampler, "magFilter"));
Texture.MinFilter minFilter = getMinFilter(getAsInteger(sampler, "minFilter"));
Texture.WrapMode wrapS = getWrapMode(getAsInteger(sampler, "wrapS"));
Texture.WrapMode wrapT = getWrapMode(getAsInteger(sampler, "wrapT"));
if (magFilter != null) {
texture.setMagFilter(magFilter);
}
if (minFilter != null) {
texture.setMinFilter(minFilter);
}
texture.setWrap(Texture.WrapAxis.S, wrapS);
texture.setWrap(Texture.WrapAxis.T, wrapT);
texture = customContentManager.readExtensionAndExtras("texture.sampler", sampler, texture);
return texture;
}
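//Reads the skins: builds a Bone for each joint, computes bind transforms from the inverse bind matrices, and wraps the resulting Skeleton in a SkinData with its SkeletonControl and AnimControl.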
public void readSkins() throws IOException {
if (skins == null) {
//no skins, no bone animation.
return;
}
for (int index = 0; index < skins.size(); index++) {
JsonObject skin = skins.get(index).getAsJsonObject();
//Note that the "skeleton" index is intentionally ignored.
//It's not mandatory and exporters tend to mix up how it should be used because the spec is not clear.
//Anyway we have other means to detect both armature structures and root bones.
JsonArray joints = skin.getAsJsonArray("joints");
assertNotNull(joints, "No joints defined for skin");
//These inverse bind matrices, once inverted again, will give us the real bind pose of the bones (in model space),
//since the skeleton is not guaranteed to be exported in bind pose.
Integer matricesIndex = getAsInteger(skin, "inverseBindMatrices");
Matrix4f[] inverseBindMatrices = null;
if (matricesIndex != null) {
inverseBindMatrices = readAccessorData(matricesIndex, matrix4fArrayPopulator);
} else {
inverseBindMatrices = new Matrix4f[joints.size()];
for (int i = 0; i < inverseBindMatrices.length; i++) {
inverseBindMatrices[i] = new Matrix4f();
}
}
Bone[] bones = new Bone[joints.size()];
for (int i = 0; i < joints.size(); i++) {
int boneIndex = joints.get(i).getAsInt();
//we don't need the inverse bind matrix, we need the bind matrix so let's invert it.
Matrix4f modelBindMatrix = inverseBindMatrices[i].invertLocal();
bones[i] = readNodeAsBone(boneIndex, i, index, modelBindMatrix);
}
for (int i = 0; i < joints.size(); i++) {
findChildren(joints.get(i).getAsInt());
}
Skeleton skeleton = new Skeleton(bones);
//Compute bind transforms. We need to do it from the root bones down to the leaf bones.
for (Bone bone : skeleton.getRoots()) {
BoneWrapper bw = findBoneWrapper(bone);
computeBindTransforms(bw, skeleton);
}
skeleton = customContentManager.readExtensionAndExtras("skin", skin, skeleton);
SkinData skinData = new SkinData();
skinData.bones = bones;
skinData.skeletonControl = new SkeletonControl(skeleton);
skinData.animControl = new AnimControl(skinData.skeletonControl.getSkeleton());
addToCache("skins", index, skinData, nodes.size());
skinnedSpatials.put(skinData, new ArrayList<Spatial>());
// Set local transforms.
// The skeleton may come in a given pose that is not the rest pose, so let's apply it.
// We will need it later for animation
for (int i = 0; i < joints.size(); i++) {
applyPose(joints.get(i).getAsInt());
}
skeleton.updateWorldVectors();
//If the user didn't ask to keep the pose we reset the skeleton user control
if (!isKeepSkeletonPose(info)) {
for (Bone bone : bones) {
bone.setUserControl(false);
}
}
}
}
private void applyPose(int index) {
BoneWrapper bw = fetchFromCache("nodes", index, BoneWrapper.class);
bw.bone.setUserControl(true);
bw.bone.setLocalTranslation(bw.localTransform.getTranslation());
bw.bone.setLocalRotation(bw.localTransform.getRotation());
bw.bone.setLocalScale(bw.localTransform.getScale());
}
private void computeBindTransforms(BoneWrapper boneWrapper, Skeleton skeleton) {
Bone bone = boneWrapper.bone;
tmpTransforms.fromTransformMatrix(boneWrapper.modelBindMatrix);
if (bone.getParent() != null) {
//For root bones, model transforms are the same as the local transforms,
//but for child bones we need to combine them with the parent's inverse model transforms.
tmpMat.setTranslation(bone.getParent().getModelSpacePosition());
tmpMat.setRotationQuaternion(bone.getParent().getModelSpaceRotation());
tmpMat.setScale(bone.getParent().getModelSpaceScale());
tmpMat.invertLocal();
tmpTransforms2.fromTransformMatrix(tmpMat);
tmpTransforms.combineWithParent(tmpTransforms2);
}
bone.setBindTransforms(tmpTransforms.getTranslation(), tmpTransforms.getRotation(), tmpTransforms.getScale());
//resets the local transforms to bind transforms for all bones.
//then computes the model transforms from local transforms for each bone.
skeleton.resetAndUpdate();
skeleton.setBindingPose();
for (Integer childIndex : boneWrapper.children) {
BoneWrapper child = fetchFromCache("nodes", childIndex, BoneWrapper.class);
computeBindTransforms(child, skeleton);
}
}
private BoneWrapper findBoneWrapper(Bone bone) {
for (int i = 0; i < nodes.size(); i++) {
BoneWrapper bw = fetchFromCache("nodes", i, BoneWrapper.class);
if (bw != null && bw.bone == bone) {
return bw;
}
}
return null;
}
public Bone readNodeAsBone(int nodeIndex, int boneIndex, int skinIndex, Matrix4f modelBindMatrix) throws IOException {
BoneWrapper boneWrapper = fetchFromCache("nodes", nodeIndex, BoneWrapper.class);
if (boneWrapper != null) {
return boneWrapper.bone;
}
JsonObject nodeData = nodes.get(nodeIndex).getAsJsonObject();
String name = getAsString(nodeData, "name");
if (name == null) {
name = "Bone_" + nodeIndex;
}
Bone bone = new Bone(name);
Transform boneTransforms = null;
boneTransforms = readTransforms(nodeData);
addToCache("nodes", nodeIndex, new BoneWrapper(bone, boneIndex, skinIndex, modelBindMatrix, boneTransforms), nodes.size());
return bone;
}
private void findChildren(int nodeIndex) throws IOException {
BoneWrapper bw = fetchFromCache("nodes", nodeIndex, BoneWrapper.class);
JsonObject nodeData = nodes.get(nodeIndex).getAsJsonObject();
JsonArray children = nodeData.getAsJsonArray("children");
if (children != null) {
for (JsonElement child : children) {
int childIndex = child.getAsInt();
BoneWrapper cbw = fetchFromCache("nodes", childIndex, BoneWrapper.class);
if (cbw != null) {
bw.bone.addChild(cbw.bone);
bw.children.add(childIndex);
} else {
//The child might be a Node
//Creating a dummy node to read the subgraph
Node n = new Node();
readChild(n, child);
Spatial s = n.getChild(0);
//removing the spatial from the dummy node, it will be attached to the attachment node of the bone
s.removeFromParent();
bw.attachedSpatial = s;
}
}
}
}
private void setupControls() {
for (SkinData skinData : skinnedSpatials.keySet()) {
List<Spatial> spatials = skinnedSpatials.get(skinData);
Spatial spatial = skinData.parent;
if (spatials.size() >= 1) {
spatial = findCommonAncestor(spatials);
}
if (spatial != skinData.parent) {
skinData.rootBoneTransformOffset = spatial.getWorldTransform().invert();
skinData.rootBoneTransformOffset.combineWithParent(skinData.parent.getWorldTransform());
}
if (skinData.animControl != null && skinData.animControl.getSpatial() == null) {
spatial.addControl(skinData.animControl);
}
spatial.addControl(skinData.skeletonControl);
}
for (int i = 0; i < nodes.size(); i++) {
BoneWrapper bw = fetchFromCache("nodes", i, BoneWrapper.class);
if (bw == null || bw.attachedSpatial == null) {
continue;
}
SkinData skinData = fetchFromCache("skins", bw.skinIndex, SkinData.class);
skinData.skeletonControl.getAttachmentsNode(bw.bone.getName()).attachChild(bw.attachedSpatial);
}
}
private String readMeshName(int meshIndex) {
JsonObject meshData = meshes.get(meshIndex).getAsJsonObject();
return getAsString(meshData, "name");
}
public <T> T fetchFromCache(String name, int index, Class<T> type) {
Object[] data = dataCache.get(name);
if (data == null) {
return null;
}
try {
T ret = type.cast(data[index]);
return ret;
} catch (ClassCastException e) {
return null;
}
}
public void addToCache(String name, int index, Object object, int maxLength) {
Object[] data = dataCache.get(name);
if (data == null) {
data = new Object[maxLength];
dataCache.put(name, data);
}
data[index] = object;
}
public AssetInfo getInfo() {
return info;
}
public JsonObject getDocRoot() {
return docRoot;
}
public Node getRootNode() {
return rootNode;
}
public static class WeightData {
float value;
short index;
int componentSize;
public WeightData(float value, short index, int componentSize) {
this.value = value;
this.index = index;
this.componentSize = componentSize;
}
}
private class BoneWrapper {
Bone bone;
int boneIndex;
int skinIndex;
Transform localTransform;
Transform localTransformOffset;
Matrix4f modelBindMatrix;
boolean isRoot = false;
boolean localUpdated = false;
Spatial attachedSpatial;
List<Integer> children = new ArrayList<>();
public BoneWrapper(Bone bone, int boneIndex, int skinIndex, Matrix4f modelBindMatrix, Transform localTransform) {
this.bone = bone;
this.boneIndex = boneIndex;
this.skinIndex = skinIndex;
this.modelBindMatrix = modelBindMatrix;
this.localTransform = localTransform;
this.localTransformOffset = localTransform.clone();
}
/**
* Applies the inverse bind transforms to the animation data, and the armature transforms if relevant.
*/
public void update(TrackData data) {
Transform bindTransforms = new Transform(bone.getBindPosition(), bone.getBindRotation(), bone.getBindScale());
SkinData skinData = fetchFromCache("skins", skinIndex, SkinData.class);
if (!localUpdated) {
//The local transform of the bone is the default pose to use for animations when there is no track.
//We need to transform it so that JME can use it in blendAnimTransforms.
reverseBlendAnimTransforms(localTransformOffset, bindTransforms);
localUpdated = true;
}
for (int i = 0; i < data.getNbKeyFrames(); i++) {
Vector3f translation = getTranslation(data, i);
Quaternion rotation = getRotation(data, i);
Vector3f scale = getScale(data, i);
Transform t = new Transform(translation, rotation, scale);
if (isRoot && skinData.rootBoneTransformOffset != null) {
//Apply the armature transforms to the root bone anim track.
t.combineWithParent(skinData.rootBoneTransformOffset);
}
reverseBlendAnimTransforms(t, bindTransforms);
if (data.translations != null) {
data.translations[i] = t.getTranslation();
}
if (data.rotations != null) {
data.rotations[i] = t.getRotation();
}
if (data.scales != null) {
data.scales[i] = t.getScale();
}
}
data.ensureTranslationRotations(localTransformOffset);
}
private void reverseBlendAnimTransforms(Transform t, Transform bindTransforms) {
//This is wrong
//You'd normally combine those transforms with transform.combineWithParent()
//Here we actually do in reverse what JME does to combine anim transforms with bind transforms (add trans/mult rot/ mult scale)
//The code to fix is in Bone.blendAnimTransforms
//TODO fix blendAnimTransforms
t.getTranslation().subtractLocal(bindTransforms.getTranslation());
t.getScale().divideLocal(bindTransforms.getScale());
tmpQuat.set(bindTransforms.getRotation()).inverseLocal().multLocal(t.getRotation());
t.setRotation(tmpQuat);
}
private Vector3f getTranslation(TrackData data, int i) {
Vector3f translation;
if (data.translations == null) {
translation = bone.getLocalPosition();
} else {
translation = data.translations[i];
}
return translation;
}
private Quaternion getRotation(TrackData data, int i) {
Quaternion rotation;
if (data.rotations == null) {
rotation = bone.getLocalRotation();
} else {
rotation = data.rotations[i];
}
return rotation;
}
private Vector3f getScale(TrackData data, int i) {
Vector3f scale;
if (data.scales == null) {
scale = bone.getLocalScale();
} else {
scale = data.scales[i];
}
return scale;
}
}
private class SkinData {
SkeletonControl skeletonControl;
AnimControl animControl;
Spatial parent;
Transform rootBoneTransformOffset;
Bone[] bones;
boolean used = false;
}
public static class SkinBuffers {
short[] joints;
float[] weights;
int componentSize;
public SkinBuffers(short[] joints, int componentSize) {
this.joints = joints;
this.componentSize = componentSize;
}
public SkinBuffers() {
}
}
private interface Populator<T> {
T populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException;
}
private class VertexBufferPopulator implements Populator<VertexBuffer> {
VertexBuffer.Type bufferType;
public VertexBufferPopulator(VertexBuffer.Type bufferType) {
this.bufferType = bufferType;
}
@Override
public VertexBuffer populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
if (bufferType == null) {
logger.log(Level.WARNING, "could not assign data to any VertexBuffer type for buffer view " + bufferViewIndex);
return null;
}
VertexBuffer vb = new VertexBuffer(bufferType);
VertexBuffer.Format format = getVertexBufferFormat(componentType);
VertexBuffer.Format originalFormat = format;
if (normalized) {
//Some float data can be packed into short buffers; "normalized" means it has to be unpacked.
//In that case the target buffer is a FloatBuffer.
format = VertexBuffer.Format.Float;
}
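// For reference, the glTF 2.0 spec defines the "normalized" integer-to-float mapping as:
//   unsigned byte  c -> c / 255.0
//   unsigned short c -> c / 65535.0   e.g. float f = (s & 0xFFFF) / 65535f;
//   signed byte    c -> max(c / 127.0, -1.0)
//   signed short   c -> max(c / 32767.0, -1.0)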
int numComponents = getNumberOfComponents(type);
Buffer buff = VertexBuffer.createBuffer(format, numComponents, count);
int bufferSize = numComponents * count;
if (bufferViewIndex == null) {
//No referenced buffer; the spec says to pad the buffer with zeros.
padBuffer(buff, bufferSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, buff, numComponents, originalFormat);
}
if (bufferType == VertexBuffer.Type.Index) {
numComponents = 3;
}
vb.setupData(VertexBuffer.Usage.Dynamic, numComponents, format, buff);
return vb;
}
}
private class FloatArrayPopulator implements Populator<float[]> {
@Override
public float[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
float[] data = new float[dataSize];
if (bufferViewIndex == null) {
//No referenced buffer; the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class Vector3fArrayPopulator implements Populator<Vector3f[]> {
@Override
public Vector3f[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
Vector3f[] data = new Vector3f[count];
if (bufferViewIndex == null) {
//No referenced buffer; the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class QuaternionArrayPopulator implements Populator<Quaternion[]> {
@Override
public Quaternion[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
Quaternion[] data = new Quaternion[count];
if (bufferViewIndex == null) {
//No referenced buffer; the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class Matrix4fArrayPopulator implements Populator<Matrix4f[]> {
@Override
public Matrix4f[] populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
int dataSize = numComponents * count;
Matrix4f[] data = new Matrix4f[count];
if (bufferViewIndex == null) {
//No referenced buffer; the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, getVertexBufferFormat(componentType));
}
return data;
}
}
private class JointArrayPopulator implements Populator<SkinBuffers> {
@Override
public SkinBuffers populate(Integer bufferViewIndex, int componentType, String type, int count, int byteOffset, boolean normalized) throws IOException {
int numComponents = getNumberOfComponents(type);
//Joint indices can be stored as unsigned bytes (glTF componentType 5121) or unsigned shorts (componentType 5123).
VertexBuffer.Format format = VertexBuffer.Format.Byte;
if (componentType == 5123) {
format = VertexBuffer.Format.Short;
}
int dataSize = numComponents * count;
short[] data = new short[dataSize];
if (bufferViewIndex == null) {
//No referenced buffer; the spec says to pad the data with zeros.
padBuffer(data, dataSize);
} else {
readBuffer(bufferViewIndex, byteOffset, count, data, numComponents, format);
}
return new SkinBuffers(data, format.getComponentSize());
}
}
}
|
Fixes issues in the gltf loader when there are several skins
|
jme3-plugins/src/gltf/java/com/jme3/scene/plugins/gltf/GltfLoader.java
|
Fixes issues in the gltf loader when there are several skins
|
|
Java
|
bsd-3-clause
|
d636cd03957625184058ac44d1b95ece25227de0
| 0
|
motech/perf,motech/perf,motech/perf
|
package org.motechproject.kil3.service;
import com.google.common.base.Strings;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.joda.time.DateTime;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.listener.EventRelay;
import org.motechproject.event.listener.annotations.MotechListener;
import org.motechproject.kil3.database.*;
import org.motechproject.mds.query.QueryParams;
import org.motechproject.scheduler.contract.RunOnceSchedulableJob;
import org.motechproject.scheduler.service.MotechSchedulerService;
import org.motechproject.server.config.SettingsFacade;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import java.io.*;
import java.net.URISyntaxException;
import java.util.*;
@Service("kil3Service")
public class Kil3ServiceImpl implements Kil3Service {
private final static String CALL_DIRECTORY = "kil3.call_directory";
private final static String CALL_SERVER_URL = "kil3.call_server_url";
private final static String CDR_DIRECTORY = "kil3.cdr_directory";
private static final String CREATE_CALL_FILE = "create_call_file";
private static final String PROCESS_CDR_FILE = "process_cdr_file";
private static final String PROCESS_ONE_CDR = "process_one_cdr";
private static final Integer MAX_RECIPIENT_BLOCK = 10000;
private final static String REDIS_SERVER_PROPERTY = "kil3.redis_server";
private final static long MILLIS_PER_SECOND = 1000;
private Logger LOGGER = LoggerFactory.getLogger(Kil3ServiceImpl.class);
private SettingsFacade settingsFacade;
private EventRelay eventRelay;
private RecipientDataService recipientDataService;
private CallHistoryDataService callHistoryDataService;
private MotechSchedulerService schedulerService;
private List<String> dayList = Arrays.asList("1", "2", "3", "4", "5", "6", "7");
private JedisPool jedisPool;
@Autowired
public Kil3ServiceImpl(@Qualifier("kil3Settings") SettingsFacade settingsFacade, EventRelay eventRelay,
RecipientDataService recipientDataService, CallHistoryDataService callHistoryDataService,
MotechSchedulerService schedulerService) {
this.settingsFacade = settingsFacade;
this.eventRelay = eventRelay;
this.recipientDataService = recipientDataService;
this.callHistoryDataService = callHistoryDataService;
this.schedulerService = schedulerService;
String redisServer = settingsFacade.getProperty(REDIS_SERVER_PROPERTY);
LOGGER.info("redis server: {}", redisServer);
jedisPool = new JedisPool(new JedisPoolConfig(), redisServer);
}
private static String redisJobExpectations(String jobId) {
return String.format("%s-expectations", jobId);
}
private static String redisJobExpecting(String jobId) {
return String.format("%s-expecting", jobId);
}
private static String redisJobTimer(String jobId) {
return String.format("%s-timer", jobId);
}
private static long redisTime(Jedis jedis) {
List<String> t = jedis.time();
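// Redis TIME returns [unix time in seconds, microseconds within that second],
// so seconds * 1000 + microseconds / 1000 gives the current time in milliseconds.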
return Long.valueOf(t.get(0)) * 1000 + Long.valueOf(t.get(1)) / 1000;
}
private void setExpectations(String jobId, long count) {
LOGGER.info("setExpectations({}, {})", jobId, count);
try (Jedis jedis = jedisPool.getResource()) {
jedis.set(redisJobExpectations(jobId), String.valueOf(count));
jedis.set(redisJobExpecting(jobId), String.valueOf(count));
jedis.del(redisJobTimer(jobId));
}
}
private void meetExpectation(String jobId) {
LOGGER.debug("meetExpectation({})", jobId);
try (Jedis jedis = jedisPool.getResource()) {
// Start timer if not already started
if (!jedis.exists(redisJobTimer(jobId))) {
jedis.setnx(redisJobTimer(jobId), String.valueOf(redisTime(jedis)));
}
long expecting = jedis.decr(redisJobExpecting(jobId));
// All expectations met
if (expecting <= 0) {
long milliStop = redisTime(jedis);
long milliStart = Long.valueOf(jedis.get(redisJobTimer(jobId)));
long millis = milliStop - milliStart;
String expectationsString = jedis.get(redisJobExpectations(jobId));
if (Strings.isNullOrEmpty(expectationsString)) {
LOGGER.warn("meetExpectation was called on a null redis key: {}", redisJobExpectations(jobId));
} else {
long expectations = Long.valueOf(expectationsString);
float rate = (float) expectations * MILLIS_PER_SECOND / millis;
LOGGER.info("Measured {} calls at {} calls/second", expectations, rate);
jedis.del(redisJobExpectations(jobId));
jedis.del(redisJobExpecting(jobId));
jedis.del(redisJobTimer(jobId));
}
} else if (expecting % 1000 == 0) {
long milliStop = redisTime(jedis);
long milliStart = Long.valueOf(jedis.get(redisJobTimer(jobId)));
long millis = milliStop - milliStart;
long expectations = Long.valueOf(jedis.get(redisJobExpectations(jobId)));
long count = expectations - expecting;
float rate = (float) count * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Expectations: %d/%d @ %f/s", expecting, expectations, rate));
}
}
}
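// Usage pattern within this class (see processCDRFile and processOneCDR below):
//   setExpectations("CDR", cdrs.size());   // before queueing the CDR events
//   meetExpectation("CDR");                // once per CDR actually processed
// When the "expecting" counter reaches zero the measured throughput is logged and the
// Redis keys are deleted; an intermediate rate is logged whenever the remaining count
// hits a multiple of 1000.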
private String callFileName(String day) {
return String.format("%sday%s-calls.csv", settingsFacade.getProperty(CALL_DIRECTORY), day);
}
private String cdrFileName(String day) {
return String.format("%sday%s-cdrs.csv", settingsFacade.getProperty(CDR_DIRECTORY), day);
}
private String callTempFileName(String day) {
return String.format("%s~", callFileName(day));
}
private void sendCallHttpRequest(String day) {
LOGGER.debug("sendCallHttpRequest(day={})", day);
String uri = String.format("%s/call?day=%s", settingsFacade.getProperty(CALL_SERVER_URL), day);
HttpUriRequest request;
URIBuilder builder;
try {
builder = new URIBuilder(uri);
builder.setParameter("day", day);
request = new HttpGet(builder.build());
} catch (URISyntaxException e) {
String message = "Unexpected error creating a URI";
LOGGER.warn(message);
throw new IllegalStateException(message, e);
}
LOGGER.debug("Generated {}", request.toString());
HttpResponse response;
try {
DefaultHttpClient client = new DefaultHttpClient();
response = client.execute(request);
} catch (IOException e) {
String message = String.format("Could not initiate call, unexpected exception: %s", e.toString());
LOGGER.warn(message);
throw new IllegalStateException(message, e);
}
}
@MotechListener(subjects = { CREATE_CALL_FILE })
public void handleCreateCallFile(MotechEvent event) {
LOGGER.info("handleCreateCallFile(event={})", event.toString());
String day = (String)event.getParameters().get("day");
String callFileName = callFileName(day);
String callTempFileName = callTempFileName(day);
long milliStart = System.currentTimeMillis();
String ret;
try (PrintWriter writer = new PrintWriter(callTempFileName, "UTF-8")) {
int page = 1;
int numBlockRecipients = 0;
long numRecipients = 0;
do {
List<Recipient> recipients = recipientDataService.findByDay(day,
new QueryParams(page, MAX_RECIPIENT_BLOCK));
numBlockRecipients = recipients.size();
for (Recipient recipient : recipients) {
writer.print(recipientDataService.getDetachedField(recipient, "id"));
writer.print(",");
writer.print(recipient.getPhone());
writer.print(",");
writer.print(recipient.pregnancyWeek());
writer.print(",");
writer.println(recipient.getLanguage());
}
page++;
numRecipients += numBlockRecipients;
if (numBlockRecipients > 0) {
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) numRecipients * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Read %d %s @ %s/sec", numRecipients,
numRecipients == 1 ? "recipient" : "recipients", rate));
}
} while (numBlockRecipients > 0);
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) numRecipients * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Wrote %d %s to %s in %dms (%s/sec)", numRecipients,
numRecipients == 1 ? "call" : "calls", callTempFileName, millis, rate));
ret = String.format("%s %d calls (%s/sec)", callFileName, numRecipients, rate);
} catch (FileNotFoundException | UnsupportedEncodingException e) {
String error = String.format("Unable to create temp call file %s: %s", callTempFileName, e.getMessage());
LOGGER.error(error);
return;
}
File fOld = new File(callFileName);
if (fOld.exists()) {
LOGGER.info("Deleting old file {}...", callFileName);
fOld.delete();
}
LOGGER.info("Renaming temp file {} to {}...", callTempFileName, callFileName);
File fTmp = new File(callTempFileName);
fTmp.renameTo(new File(callFileName));
LOGGER.info("Altering timestamp on {}...", callFileName);
File fCall = new File(callFileName);
fCall.setLastModified(fCall.lastModified()+1);
LOGGER.info("Informing the IVR system the call file is available...");
sendCallHttpRequest(day);
}
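// Summary of the publish sequence above: write all recipients for the requested day to a
// temp file ("<call file>~"), delete any previous call file, rename the temp file into
// place, bump the file's modification time by one millisecond (presumably so file watchers
// notice the change), then notify the IVR server via sendCallHttpRequest(day).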
public String createCallFile(String day) {
LOGGER.info("createCallFile(day={})", day);
if (!dayList.contains(day)) {
return String.format("%s is not a valid day. Valid days: %s", day, dayList);
}
Map<String, Object> eventParams = new HashMap<>();
eventParams.put("day", day);
MotechEvent motechEvent = new MotechEvent(CREATE_CALL_FILE, eventParams);
schedulerService.safeScheduleRunOnceJob(new RunOnceSchedulableJob(motechEvent,
DateTime.now().plusSeconds(1).toDate()));
return "OK";
}
public String getRecipients() {
LOGGER.debug("getRecipients()");
StringBuilder sb = new StringBuilder();
String sep = "";
for (String day : dayList) {
sb.append(sep);
sb.append(recipientDataService.countFindByDay(day));
if (sep.isEmpty()) {
sep = " ";
}
}
return sb.toString();
}
private int callStatusSlotIncrement(CallStatus callStatus) {
switch (callStatus) {
case NA:
return 2;
case ND:
case SO:
return 4;
default:
throw new IllegalArgumentException();
}
}
private void addCallHistory(Recipient recipient, CallStatus callStatus, RecipientStatus recipientStatus) {
CallHistory recipientHistory = new CallHistory(recipient.getDay(), recipient.getCallStage(), recipient.getPhone(),
recipient.getLanguage(), recipient.getExpectedDeliveryDate(), callStatus, recipientStatus);
callHistoryDataService.create(recipientHistory);
}
@MotechListener(subjects = {PROCESS_ONE_CDR})
public void processOneCDR(MotechEvent event) {
LOGGER.debug("processOneCDR(event={})", event.toString());
String line = (String)event.getParameters().get("CDR");
CallDetailRecord cdr = CallDetailRecord.fromString(line);
LOGGER.debug("Processing slotting for {}...", cdr);
Recipient recipient = recipientDataService.findById(Long.parseLong(cdr.getRecipient()));
String day = recipient.getDay();
CallStatus callStatus = cdr.getCallStatus();
meetExpectation("CDR");
if (CallStatus.OK == callStatus) {
if (recipient.getInitialDay().equals(day)) {
addCallHistory(recipient, callStatus, RecipientStatus.AC);
return;
} else {
recipient.setDay(recipient.getInitialDay());
}
} else {
switch (recipient.getCallStage()) {
case FB:
recipient.setCallStage(CallStage.R1);
recipient.incDay();
break;
case R1:
recipient.setCallStage(CallStage.R2);
recipient.incDay();
break;
case R2:
recipient.setCallStage(CallStage.R3);
recipient.incDay();
break;
case R3:
recipient.setCallStage(CallStage.FB);
recipient.setDay(recipient.getInitialDay());
break;
}
}
recipientDataService.update(recipient);
addCallHistory(recipient, callStatus, RecipientStatus.AC);
}
//
// This is simplistic. The real system should periodically deal with 'orphan' calls which were somehow not
// included in a CDR file and must be reslotted for their next week.
//
@MotechListener(subjects = { PROCESS_CDR_FILE })
public void processCDRFile(MotechEvent event) {
LOGGER.info("processCDRFile(event={})", event.toString());
String path = (String)event.getParameters().get("file");
long milliStart = System.currentTimeMillis();
List<String> cdrs = new ArrayList<>();
try(BufferedReader br = new BufferedReader(new FileReader(path))) {
String line;
int lineCount = 0;
while ((line = br.readLine()) != null) {
try {
if (Strings.isNullOrEmpty(line)) {
LOGGER.debug("{}({}): Skipping blank line", path, lineCount + 1);
continue;
}
CallDetailRecord.validate(line);
cdrs.add(line);
} catch (Exception e) {
LOGGER.error("{}({}): invalid CDR format", path, lineCount + 1);
}
lineCount++;
}
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) lineCount * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Read %d %s in %ss @ (%s/sec)", lineCount, lineCount == 1 ? "line" : "lines",
millis / 1000, rate));
} catch (IOException e) {
LOGGER.error("Error while reading {}: {}", path, e.getMessage());
}
setExpectations("CDR", cdrs.size());
milliStart = System.currentTimeMillis();
int cdrCount = 0;
for (String line : cdrs) {
Map<String, Object> eventParams = new HashMap<>();
eventParams.put("CDR", line);
MotechEvent motechEvent = new MotechEvent(PROCESS_ONE_CDR, eventParams);
eventRelay.sendEventMessage(motechEvent);
//processOneCDR(motechEvent);
cdrCount++;
if (cdrCount % 10000 == 0) {
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) cdrCount * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Queued %d cdrs for processing in %ss @ (%s/sec)", cdrCount, millis / 1000,
rate));
}
}
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) cdrs.size() * MILLIS_PER_SECOND / millis;
if (cdrCount % 10000 != 0) {
LOGGER.info(String.format("Queued %d %s for processing in %ss @ (%s/sec)", cdrs.size(),
cdrs.size() == 1 ? "cdr" : "cdrs", millis / 1000, rate));
}
}
public String processCallDetailRecords(String day) {
LOGGER.debug("processCallDetailRecords(day={})", day);
Map<String, Object> eventParams = new HashMap<>();
eventParams.put("file", cdrFileName(day));
MotechEvent motechEvent = new MotechEvent(PROCESS_CDR_FILE, eventParams);
schedulerService.safeScheduleRunOnceJob(new RunOnceSchedulableJob(motechEvent,
DateTime.now().plusSeconds(1).toDate()));
//processCDRFile(motechEvent);
return "OK";
}
}
|
kil3/src/main/java/org/motechproject/kil3/service/Kil3ServiceImpl.java
|
package org.motechproject.kil3.service;
import com.google.common.base.Strings;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.joda.time.DateTime;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.listener.EventRelay;
import org.motechproject.event.listener.annotations.MotechListener;
import org.motechproject.kil3.database.*;
import org.motechproject.mds.query.QueryParams;
import org.motechproject.scheduler.contract.RunOnceSchedulableJob;
import org.motechproject.scheduler.service.MotechSchedulerService;
import org.motechproject.server.config.SettingsFacade;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import java.io.*;
import java.net.URISyntaxException;
import java.util.*;
@Service("kil3Service")
public class Kil3ServiceImpl implements Kil3Service {
private final static String CALL_DIRECTORY = "kil3.call_directory";
private final static String CALL_SERVER_URL = "kil3.call_server_url";
private final static String CDR_DIRECTORY = "kil3.cdr_directory";
private static final String CREATE_CALL_FILE = "create_call_file";
private static final String PROCESS_CDR_FILE = "process_cdr_file";
private static final String PROCESS_ONE_CDR = "process_one_cdr";
private static final Integer MAX_RECIPIENT_BLOCK = 10000;
private final static String REDIS_SERVER_PROPERTY = "kil3.redis_server";
private final static long MILLIS_PER_SECOND = 1000;
private Logger LOGGER = LoggerFactory.getLogger(Kil3ServiceImpl.class);
private SettingsFacade settingsFacade;
private EventRelay eventRelay;
private RecipientDataService recipientDataService;
private CallHistoryDataService callHistoryDataService;
private MotechSchedulerService schedulerService;
private List<String> dayList = Arrays.asList("1", "2", "3", "4", "5", "6", "7");;
private JedisPool jedisPool;
@Autowired
public Kil3ServiceImpl(@Qualifier("kil3Settings") SettingsFacade settingsFacade, EventRelay eventRelay,
RecipientDataService recipientDataService, CallHistoryDataService callHistoryDataService,
MotechSchedulerService schedulerService) {
this.settingsFacade = settingsFacade;
this.eventRelay = eventRelay;
this.recipientDataService = recipientDataService;
this.callHistoryDataService = callHistoryDataService;
this.schedulerService = schedulerService;
String redisServer = settingsFacade.getProperty(REDIS_SERVER_PROPERTY);
LOGGER.info("redis server: {}", redisServer);
jedisPool = new JedisPool(new JedisPoolConfig(), redisServer);
}
private static String redisJobExpectations(String jobId) {
return String.format("%s-expectations", jobId);
}
private static String redisJobExpecting(String jobId) {
return String.format("%s-expecting", jobId);
}
private static String redisJobTimer(String jobId) {
return String.format("%s-timer", jobId);
}
private static long redisTime(Jedis jedis) {
List<String> t = jedis.time();
return Long.valueOf(t.get(0)) * 1000 + Long.valueOf(t.get(1)) / 1000;
}
private void setExpectations(String jobId, long count) {
LOGGER.info("setExpectations({}, {})", jobId, count);
try (Jedis jedis = jedisPool.getResource()) {
jedis.set(redisJobExpectations(jobId), String.valueOf(count));
jedis.set(redisJobExpecting(jobId), String.valueOf(count));
jedis.del(redisJobTimer(jobId));
}
}
private void meetExpectation(String jobId) {
LOGGER.debug("meetExpectation({})", jobId);
try (Jedis jedis = jedisPool.getResource()) {
// Start timer if not already started
if (!jedis.exists(redisJobTimer(jobId))) {
List<String> t = jedis.time();
jedis.setnx(redisJobTimer(jobId), String.valueOf(redisTime(jedis)));
}
long expecting = jedis.decr(redisJobExpecting(jobId));
// All expectations met
if (expecting <= 0) {
List<String> t = jedis.time();
long milliStop = redisTime(jedis);
long milliStart = Long.valueOf(jedis.get(redisJobTimer(jobId)));
long millis = milliStop - milliStart;
String expectationsString = jedis.get(redisJobExpectations(jobId));
if (Strings.isNullOrEmpty(expectationsString)) {
LOGGER.warn("meetExpectation was called on a null redis key: {}", redisJobExpectations(jobId));
} else {
long expectations = Long.valueOf(expectationsString);
float rate = (float) expectations * MILLIS_PER_SECOND / millis;
LOGGER.info("Measured {} calls at {} calls/second", expectations, rate);
jedis.del(redisJobExpectations(jobId));
jedis.del(redisJobExpecting(jobId));
jedis.del(redisJobTimer(jobId));
}
} else if (expecting % 1000 == 0) {
long milliStop = redisTime(jedis);
long milliStart = Long.valueOf(jedis.get(redisJobTimer(jobId)));
long millis = milliStop - milliStart;
long expectations = Long.valueOf(jedis.get(redisJobExpectations(jobId)));
long count = expectations - expecting;
float rate = (float) count * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Expectations: %d/%d @ %f/s", expecting, expectations, rate));
}
}
}
private String callFileName(String day) {
return String.format("%sday%s-calls.csv", settingsFacade.getProperty(CALL_DIRECTORY), day);
}
private String cdrFileName(String day) {
return String.format("%sday%s-cdrs.csv", settingsFacade.getProperty(CDR_DIRECTORY), day);
}
private String callTempFileName(String day) {
return String.format("%s~", callFileName(day));
}
private void sendCallHttpRequest(String day) {
LOGGER.debug("sendCallHttpRequest(day={})", day);
String uri = String.format("%s/call?day=%s", settingsFacade.getProperty(CALL_SERVER_URL), day);
HttpUriRequest request;
URIBuilder builder;
try {
builder = new URIBuilder(uri);
builder.setParameter("day", day);
request = new HttpGet(builder.build());
} catch (URISyntaxException e) {
String message = "Unexpected error creating a URI";
LOGGER.warn(message);
throw new IllegalStateException(message, e);
}
LOGGER.debug("Generated {}", request.toString());
HttpResponse response;
try {
DefaultHttpClient client = new DefaultHttpClient();
response = client.execute(request);
} catch (IOException e) {
String message = String.format("Could not initiate call, unexpected exception: %s", e.toString());
LOGGER.warn(message);
throw new IllegalStateException(message, e);
}
}
@MotechListener(subjects = { CREATE_CALL_FILE })
public void handleCreateCallFile(MotechEvent event) {
LOGGER.info("handleCreateCallFile(event={})", event.toString());
String day = (String)event.getParameters().get("day");
String callFileName = callFileName(day);
String callTempFileName = callTempFileName(day);
long milliStart = System.currentTimeMillis();
String ret;
try (PrintWriter writer = new PrintWriter(callTempFileName, "UTF-8")) {
int page = 1;
int numBlockRecipients = 0;
long numRecipients = 0;
do {
List<Recipient> recipients = recipientDataService.findByDay(day,
new QueryParams(page, MAX_RECIPIENT_BLOCK));
numBlockRecipients = recipients.size();
for (Recipient recipient : recipients) {
writer.print(recipientDataService.getDetachedField(recipient, "id"));
writer.print(",");
writer.print(recipient.getPhone());
writer.print(",");
writer.print(recipient.pregnancyWeek());
writer.print(",");
writer.println(recipient.getLanguage());
}
page++;
numRecipients += numBlockRecipients;
if (numBlockRecipients > 0) {
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) numRecipients * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Read %d %s @ %s/sec", numRecipients,
numRecipients == 1 ? "recipient" : "recipients", rate));
}
} while (numBlockRecipients > 0);
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) numRecipients * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Wrote %d %s to %s in %dms (%s/sec)", numRecipients,
numRecipients == 1 ? "call" : "calls", callTempFileName, millis, rate));
ret = String.format("%s %d calls (%s/sec)", callFileName, numRecipients, rate);
} catch (FileNotFoundException | UnsupportedEncodingException e) {
String error = String.format("Unable to create temp call file %s: %s", callTempFileName, e.getMessage());
LOGGER.error(error);
return;
}
File fOld = new File(callFileName);
if (fOld.exists()) {
LOGGER.info("Deleting old file {}...", callFileName);
fOld.delete();
}
LOGGER.info("Renaming temp file {} to {}...", callTempFileName, callFileName);
File fTmp = new File(callTempFileName);
fTmp.renameTo(new File(callFileName));
LOGGER.info("Altering timestamp on {}...", callFileName);
File fCall = new File(callFileName);
fCall.setLastModified(fCall.lastModified()+1);
LOGGER.info("Informing the IVR system the call file is available...");
sendCallHttpRequest(day);
}
public String createCallFile(String day) {
LOGGER.info("createCallFile(day={})", day);
if (!dayList.contains(day)) {
return String.format("%s is not a valid day. Valid days: %s", day, dayList);
}
Map<String, Object> eventParams = new HashMap<>();
eventParams.put("day", day);
MotechEvent motechEvent = new MotechEvent(CREATE_CALL_FILE, eventParams);
schedulerService.safeScheduleRunOnceJob(new RunOnceSchedulableJob(motechEvent,
DateTime.now().plusSeconds(1).toDate()));
return "OK";
}
public String getRecipients() {
LOGGER.debug("getRecipients()");
StringBuilder sb = new StringBuilder();
String sep = "";
for (String day : dayList) {
sb.append(sep);
sb.append(recipientDataService.countFindByDay(day));
if (sep.isEmpty()) {
sep = " ";
}
}
return sb.toString();
}
private int callStatusSlotIncrement(CallStatus callStatus) {
switch (callStatus) {
case NA:
return 2;
case ND:
case SO:
return 4;
default:
throw new IllegalArgumentException();
}
}
private void addCallHistory(Recipient recipient, CallStatus callStatus, RecipientStatus recipientStatus) {
CallHistory recipientHistory = new CallHistory(recipient.getDay(), recipient.getCallStage(), recipient.getPhone(),
recipient.getLanguage(), recipient.getExpectedDeliveryDate(), callStatus, recipientStatus);
callHistoryDataService.create(recipientHistory);
}
@MotechListener(subjects = {PROCESS_ONE_CDR})
public void processOneCDR(MotechEvent event) {
LOGGER.debug("processOneCDR(event={})", event.toString());
String line = (String)event.getParameters().get("CDR");
CallDetailRecord cdr = CallDetailRecord.fromString(line);
LOGGER.debug("Processing slotting for {}...", cdr);
Recipient recipient = recipientDataService.findById(Long.parseLong(cdr.getRecipient()));
String day = recipient.getDay();
CallStatus callStatus = cdr.getCallStatus();
meetExpectation("CDR");
if (CallStatus.OK == callStatus) {
if (recipient.getInitialDay().equals(day)) {
addCallHistory(recipient, callStatus, RecipientStatus.AC);
return;
} else {
recipient.setDay(recipient.getInitialDay());
}
} else {
switch (recipient.getCallStage()) {
case FB:
case R1:
case R2:
recipient.setCallStage(CallStage.R1);
recipient.incDay();
break;
case R3:
recipient.setCallStage(CallStage.FB);
recipient.setDay(recipient.getInitialDay());
break;
}
}
recipientDataService.update(recipient);
addCallHistory(recipient, callStatus, RecipientStatus.AC);
}
//
// This is simplistic. The real system should periodically deal with 'orphan' calls which were somehow not
// included in a CDR file and must be reslotted for their next week..
//
@MotechListener(subjects = { PROCESS_CDR_FILE })
public void processCDRFile(MotechEvent event) {
LOGGER.info("processCDRFile(event={})", event.toString());
String path = (String)event.getParameters().get("file");
long milliStart = System.currentTimeMillis();
List<String> cdrs = new ArrayList<>();
try(BufferedReader br = new BufferedReader(new FileReader(path))) {
String line;
int lineCount = 0;
while ((line = br.readLine()) != null) {
try {
if (Strings.isNullOrEmpty(line)) {
LOGGER.debug("{}({}): Skipping blank line", path, lineCount + 1);
continue;
}
Map<String, Object> eventParams = new HashMap<>();
CallDetailRecord.validate(line);
cdrs.add(line);
} catch (Exception e) {
LOGGER.error("{}({}): invalid CDR format", path, lineCount + 1);
}
lineCount++;
}
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) lineCount * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Read %d %s in %ss @ (%s/sec)", lineCount, lineCount == 1 ? "line" : "lines",
millis / 1000, rate));
} catch (IOException e) {
LOGGER.error("Error while reading {}: {}", path, e.getMessage());
}
setExpectations("CDR", cdrs.size());
milliStart = System.currentTimeMillis();
int cdrCount = 0;
for (String line : cdrs) {
Map<String, Object> eventParams = new HashMap<>();
eventParams.put("CDR", line);
MotechEvent motechEvent = new MotechEvent(PROCESS_ONE_CDR, eventParams);
eventRelay.sendEventMessage(motechEvent);
//processOneCDR(motechEvent);
cdrCount++;
if (cdrCount % 10000 == 0) {
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) cdrCount * MILLIS_PER_SECOND / millis;
LOGGER.info(String.format("Queued %d cdrs for processing in %ss @ (%s/sec)", cdrCount, millis / 1000,
rate));
}
}
long millis = System.currentTimeMillis() - milliStart;
float rate = (float) cdrs.size() * MILLIS_PER_SECOND / millis;
if (cdrCount % 10000 != 0) {
LOGGER.info(String.format("Queued %d %s for processing in %ss @ (%s/sec)", cdrs.size(),
cdrs.size() == 1 ? "cdr" : "cdrs", millis / 1000, rate));
}
}
public String processCallDetailRecords(String day) {
LOGGER.debug("processCallDetailRecords(day={})", day);
Map<String, Object> eventParams = new HashMap<>();
eventParams.put("file", cdrFileName(day));
MotechEvent motechEvent = new MotechEvent(PROCESS_CDR_FILE, eventParams);
schedulerService.safeScheduleRunOnceJob(new RunOnceSchedulableJob(motechEvent,
DateTime.now().plusSeconds(1).toDate()));
//processCDRFile(motechEvent);
return "OK";
}
}
|
staging bug
|
kil3/src/main/java/org/motechproject/kil3/service/Kil3ServiceImpl.java
|
staging bug
|
|
Java
|
mit
|
32c1ebac2b4e8db4523dc4427f5a0fc7e3141f6d
| 0
|
rebeccahughes/react-native-device-info,rebeccahughes/react-native-device-info,rebeccahughes/react-native-device-info,rebeccahughes/react-native-device-info,rebeccahughes/react-native-device-info,rebeccahughes/react-native-device-info
|
package com.learnium.RNDeviceInfo;
import android.content.SharedPreferences;
import android.content.Context;
import android.os.RemoteException;
import android.util.Log;
import com.android.installreferrer.api.InstallReferrerClient;
import com.android.installreferrer.api.InstallReferrerStateListener;
import com.android.installreferrer.api.ReferrerDetails;
public class RNInstallReferrerClient {
private SharedPreferences sharedPreferences;
private InstallReferrerClient mReferrerClient;
RNInstallReferrerClient(Context context) {
sharedPreferences = context.getSharedPreferences("react-native-device-info", Context.MODE_PRIVATE);
mReferrerClient = InstallReferrerClient.newBuilder(context).build();
try {
mReferrerClient.startConnection(installReferrerStateListener);
} catch (Exception e) {
// This is almost always a PermissionException. Log it and move on
System.err.println("InstallReferrer exception. getInstallReferrer will be unavailable: " + e.getMessage());
}
}
private String getInstallReferrer() {
try {
return mReferrerClient
.getInstallReferrer()
.getInstallReferrer();
} catch (RemoteException e) {
e.printStackTrace();
return null;
}
}
private InstallReferrerStateListener installReferrerStateListener =
new InstallReferrerStateListener() {
@Override public void onInstallReferrerSetupFinished(int responseCode) {
switch (responseCode) {
case InstallReferrerClient.InstallReferrerResponse.OK:
// Connection established
try {
if (BuildConfig.DEBUG) Log.d("InstallReferrerState", "OK");
ReferrerDetails response = mReferrerClient.getInstallReferrer();
response.getInstallReferrer();
response.getReferrerClickTimestampSeconds();
response.getInstallBeginTimestampSeconds();
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString("installReferrer", getInstallReferrer());
editor.apply();
mReferrerClient.endConnection();
} catch (Exception e) {
e.printStackTrace();
}
break;
case InstallReferrerClient.InstallReferrerResponse.FEATURE_NOT_SUPPORTED:
if (BuildConfig.DEBUG) Log.d("InstallReferrerState", "FEATURE_NOT_SUPPORTED");
// API not available on the current Play Store app
break;
case InstallReferrerClient.InstallReferrerResponse.SERVICE_UNAVAILABLE:
if (BuildConfig.DEBUG) Log.d("InstallReferrerState", "SERVICE_UNAVAILABLE");
// Connection could not be established
break;
}
}
@Override public void onInstallReferrerServiceDisconnected() {
// Try to restart the connection on the next request to
// Google Play by calling the startConnection() method.
mReferrerClient.startConnection(installReferrerStateListener);
}
};
}
|
android/src/main/java/com/learnium/RNDeviceInfo/RNInstallReferrerClient.java
|
package com.learnium.RNDeviceInfo;
import android.content.SharedPreferences;
import android.content.Context;
import android.os.RemoteException;
import android.util.Log;
import com.android.installreferrer.api.InstallReferrerClient;
import com.android.installreferrer.api.InstallReferrerStateListener;
import com.android.installreferrer.api.ReferrerDetails;
public class RNInstallReferrerClient {
private SharedPreferences sharedPreferences;
private InstallReferrerClient mReferrerClient;
RNInstallReferrerClient(Context context) {
sharedPreferences = context.getSharedPreferences("react-native-device-info", Context.MODE_PRIVATE);
mReferrerClient = InstallReferrerClient.newBuilder(context).build();
try {
mReferrerClient.startConnection(installReferrerStateListener);
} catch (Exception e) {
// This is almost always a PermissionException. Log it and move on
System.err.println("InstallReferrer exception. getInstallReferrer will be unavailable: " + e.getMessage());
}
}
private String getInstallReferrer() {
try {
return mReferrerClient
.getInstallReferrer()
.getInstallReferrer();
} catch (RemoteException e) {
e.printStackTrace();
return null;
}
}
private InstallReferrerStateListener installReferrerStateListener =
new InstallReferrerStateListener() {
@Override public void onInstallReferrerSetupFinished(int responseCode) {
switch (responseCode) {
case InstallReferrerClient.InstallReferrerResponse.OK:
// Connection established
try {
if (BuildConfig.DEBUG) Log.d("InstallReferrerState", "OK");
ReferrerDetails response = mReferrerClient.getInstallReferrer();
response.getInstallReferrer();
response.getReferrerClickTimestampSeconds();
response.getInstallBeginTimestampSeconds();
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString("installReferrer", getInstallReferrer());
editor.apply();
mReferrerClient.endConnection();
} catch (RemoteException e) {
e.printStackTrace();
}
break;
case InstallReferrerClient.InstallReferrerResponse.FEATURE_NOT_SUPPORTED:
if (BuildConfig.DEBUG) Log.d("InstallReferrerState", "FEATURE_NOT_SUPPORTED");
// API not available on the current Play Store app
break;
case InstallReferrerClient.InstallReferrerResponse.SERVICE_UNAVAILABLE:
if (BuildConfig.DEBUG) Log.d("InstallReferrerState", "SERVICE_UNAVAILABLE");
// Connection could not be established
break;
}
}
@Override public void onInstallReferrerServiceDisconnected() {
// Try to restart the connection on the next request to
// Google Play by calling the startConnection() method.
mReferrerClient.startConnection(installReferrerStateListener);
}
};
}
|
added more wide exception handling
|
android/src/main/java/com/learnium/RNDeviceInfo/RNInstallReferrerClient.java
|
added more wide exception handling
|
|
Java
|
mit
|
7ac7afe9eebda9b44aeb1b8925ef8bb12eb8cd93
| 0
|
sda97ghb/LearnWords
|
package com.divanoapps.learnwords.activities;
import android.annotation.SuppressLint;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.TextView;
import com.divanoapps.learnwords.data.RequestError;
import com.divanoapps.learnwords.dialogs.MessageOkDialogFragment;
import com.divanoapps.learnwords.data.DB;
import com.divanoapps.learnwords.entities.Card;
import com.divanoapps.learnwords.entities.CardId;
import com.divanoapps.learnwords.entities.DeckId;
import com.divanoapps.learnwords.R;
public class CardEditActivity extends AppCompatActivity {
enum Mode {
ADD_CARD,
EDIT_CARD
}
private CardId mCardId;
private DeckId mDeckId;
private int mDifficulty = Card.getDefaultDifficulty();
private boolean mVisibility = true;
private Mode mMode;
public static String getModeExtraName() {
return "MODE_EXTRA";
}
public static String getCardIdExtraName() {
return "CARD_ID_EXTRA";
}
public static String getDeckIdExtraName() {
return "CARD_ID_EXTRA";
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_card_edit);
// Setup toolbar
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
// Expand activity to make transparent notification bar
// getWindow().setFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS,
// WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
// Set difficulty adjustment buttons
findViewById(R.id.increase_difficulty_button)
.setOnClickListener(v -> onIncreaseDifficultyClicked());
findViewById(R.id.decrease_difficulty_button)
.setOnClickListener(v -> onDecreaseDifficultyClicked());
findViewById(R.id.visibility_button)
.setOnClickListener(v -> onVisibilityClicked());
// Run required mode
// TODO: Add exception when mode is not passed
mMode = (Mode) getIntent().getSerializableExtra(getModeExtraName());
switch (mMode) {
case ADD_CARD: runAddCardMode(); break;
case EDIT_CARD: runEditCardMode(); break;
}
}
private void runAddCardMode() {
mDeckId = (DeckId) getIntent().getSerializableExtra(getDeckIdExtraName());
}
private void runEditCardMode() {
mCardId = (CardId) getIntent().getSerializableExtra(getCardIdExtraName());
mDeckId = new DeckId(mCardId.getDeckName());
DB.getCard(mCardId)
.setOnDoneListener(this::onCardReceived)
.setOnErrorListener(this::onCardRequestError)
.execute();
}
@SuppressLint("SetTextI18n")
private void onCardReceived(Card card) {
((EditText) findViewById(R.id.word_edit)).setText(card.getWord());
((EditText) findViewById(R.id.word_comment_edit)).setText(card.getWordComment());
((EditText) findViewById(R.id.translation_edit)).setText(card.getTranslation());
((EditText) findViewById(R.id.translation_comment_edit)).setText(card.getTranslationComment());
mDifficulty = card.getDifficulty();
((TextView) findViewById(R.id.difficulty_view))
.setText(Integer.valueOf(card.getDifficulty()).toString());
((TextView) findViewById(R.id.difficulty_maximum_view))
.setText(Integer.valueOf(Card.getMaxDifficulty()).toString());
mVisibility = !card.isHidden();
((ImageButton) findViewById(R.id.visibility_button))
.setImageResource(mVisibility ?
R.drawable.ic_card_edit_visible :
R.drawable.ic_card_edit_invisible);
}
private void onCardRequestError(RequestError error) {
showErrorMessage(error.getMessage());
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_card_edit_activity_toolbar, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home: finish(); return true;// NavUtils.navigateUpFromSameTask(this); return true;
case R.id.action_done: onDoneClicked(); return true;
default: return super.onOptionsItemSelected(item);
}
}
private Card getCurrentStateAsCard() {
return new Card.Builder()
.setDeckName(mDeckId.getName())
.setWord(((EditText) findViewById(R.id.word_edit)).getText().toString())
.setWordComment(((EditText) findViewById(R.id.word_comment_edit)).getText().toString())
.setTranslation(((EditText) findViewById(R.id.translation_edit)).getText().toString())
.setTranslationComment(((EditText) findViewById(R.id.translation_comment_edit)).getText().toString())
.setDifficulty(mDifficulty)
.setHidden(!mVisibility)
.setCropPicture(false)
.setPictureUrl("")
.build();
}
private void showErrorMessage(String message) {
MessageOkDialogFragment.show(this, message);
}
private void showErrorMessage(RequestError error) {
showErrorMessage(error.getMessage());
}
private void showCardAlreadyExistsErrorMessage() {
final String message = getString(R.string.card_with_id_already_exists);
showErrorMessage(message);
}
private void onDoneClicked() {
// TODO: add exception when card id is empty
switch (mMode) {
case ADD_CARD: addCard(); break;
case EDIT_CARD: editCard(); break;
}
}
private void addCard() {
final Card card = getCurrentStateAsCard();
DB.getCard(card.getId())
.setOnDoneListener(this::showCardAlreadyExistsErrorMessage)
.setOnErrorListener(() ->
DB.saveCard(card)
.setOnDoneListener(this::finish)
.setOnErrorListener(this::showErrorMessage)
.execute()
)
.execute();
}
private void editCard() {
final Card card = getCurrentStateAsCard();
DB.updateCard(mCardId, card)
.setOnDoneListener(this::finish)
.setOnErrorListener(error -> {
if (error.getType() == RequestError.Type.AlreadyExists)
showCardAlreadyExistsErrorMessage();
else
showErrorMessage(error.getMessage());
})
.execute();
}
@SuppressLint("SetTextI18n")
private void onIncreaseDifficultyClicked() {
++ mDifficulty;
if (mDifficulty > Card.getMaxDifficulty())
mDifficulty = Card.getMaxDifficulty();
((TextView) findViewById(R.id.difficulty_view))
.setText(Integer.valueOf(mDifficulty).toString());
}
@SuppressLint("SetTextI18n")
private void onDecreaseDifficultyClicked() {
-- mDifficulty;
if (mDifficulty < Card.getMinDifficulty())
mDifficulty = Card.getMinDifficulty();
((TextView) findViewById(R.id.difficulty_view))
.setText(Integer.valueOf(mDifficulty).toString());
}
private void onVisibilityClicked() {
mVisibility = !mVisibility;
((ImageButton) findViewById(R.id.visibility_button))
.setImageResource(mVisibility ?
R.drawable.ic_card_edit_visible :
R.drawable.ic_card_edit_invisible);
}
}
|
app/src/main/java/com/divanoapps/learnwords/activities/CardEditActivity.java
|
package com.divanoapps.learnwords.activities;
import android.annotation.SuppressLint;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.TextView;
import com.divanoapps.learnwords.data.RequestError;
import com.divanoapps.learnwords.dialogs.MessageOkDialogFragment;
import com.divanoapps.learnwords.data.DB;
import com.divanoapps.learnwords.entities.Card;
import com.divanoapps.learnwords.entities.CardId;
import com.divanoapps.learnwords.entities.DeckId;
import com.divanoapps.learnwords.R;
public class CardEditActivity extends AppCompatActivity {
enum Mode {
ADD_CARD,
EDIT_CARD
}
private CardId mCardId;
private DeckId mDeckId;
private int mDifficulty = Card.getDefaultDifficulty();
private boolean mVisibility = true;
private Mode mMode;
public static String getModeExtraName() {
return "MODE_EXTRA";
}
public static String getCardIdExtraName() {
return "CARD_ID_EXTRA";
}
public static String getDeckIdExtraName() {
return "CARD_ID_EXTRA";
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_card_edit);
// Setup toolbar
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
// Expand activity to make transparent notification bar
// getWindow().setFlags(WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS,
// WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS);
// Set difficulty adjustment buttons
findViewById(R.id.increase_difficulty_button)
.setOnClickListener(v -> onIncreaseDifficultyClicked());
findViewById(R.id.decrease_difficulty_button)
.setOnClickListener(v -> onDecreaseDifficultyClicked());
// Run required mode
// TODO: Add exception when mode is not passed
mMode = (Mode) getIntent().getSerializableExtra(getModeExtraName());
switch (mMode) {
case ADD_CARD: runAddCardMode(); break;
case EDIT_CARD: runEditCardMode(); break;
}
}
private void runAddCardMode() {
mDeckId = (DeckId) getIntent().getSerializableExtra(getDeckIdExtraName());
}
private void runEditCardMode() {
mCardId = (CardId) getIntent().getSerializableExtra(getCardIdExtraName());
mDeckId = new DeckId(mCardId.getDeckName());
DB.getCard(mCardId)
.setOnDoneListener(this::onCardReceived)
.setOnErrorListener(this::onCardRequestError)
.execute();
}
@SuppressLint("SetTextI18n")
private void onCardReceived(Card card) {
((EditText) findViewById(R.id.word_edit)).setText(card.getWord());
((EditText) findViewById(R.id.word_comment_edit)).setText(card.getWordComment());
((EditText) findViewById(R.id.translation_edit)).setText(card.getTranslation());
((EditText) findViewById(R.id.translation_comment_edit)).setText(card.getTranslationComment());
mDifficulty = card.getDifficulty();
((TextView) findViewById(R.id.difficulty_view))
.setText(Integer.valueOf(card.getDifficulty()).toString());
((TextView) findViewById(R.id.difficulty_maximum_view))
.setText(Integer.valueOf(Card.getMaxDifficulty()).toString());
}
private void onCardRequestError(RequestError error) {
showErrorMessage(error.getMessage());
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_card_edit_activity_toolbar, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home: finish(); return true;// NavUtils.navigateUpFromSameTask(this); return true;
case R.id.action_done: onDoneClicked(); return true;
default: return super.onOptionsItemSelected(item);
}
}
private Card getCurrentStateAsCard() {
return new Card.Builder()
.setDeckName(mDeckId.getName())
.setWord(((EditText) findViewById(R.id.word_edit)).getText().toString())
.setWordComment(((EditText) findViewById(R.id.word_comment_edit)).getText().toString())
.setTranslation(((EditText) findViewById(R.id.translation_edit)).getText().toString())
.setTranslationComment(((EditText) findViewById(R.id.translation_comment_edit)).getText().toString())
.setDifficulty(mDifficulty)
.setHidden(mVisibility)
.setCropPicture(false)
.setPictureUrl("")
.build();
}
private void showErrorMessage(String message) {
MessageOkDialogFragment.show(this, message);
}
private void showErrorMessage(RequestError error) {
showErrorMessage(error.getMessage());
}
private void showCardAlreadyExistsErrorMessage() {
final String message = getString(R.string.card_with_id_already_exists);
showErrorMessage(message);
}
private void onDoneClicked() {
// TODO: add exception when card id is empty
switch (mMode) {
case ADD_CARD: addCard(); break;
case EDIT_CARD: editCard(); break;
}
}
private void addCard() {
final Card card = getCurrentStateAsCard();
DB.getCard(card.getId())
.setOnDoneListener(this::showCardAlreadyExistsErrorMessage)
.setOnErrorListener(() ->
DB.saveCard(card)
.setOnDoneListener(this::finish)
.setOnErrorListener(this::showErrorMessage)
.execute()
)
.execute();
}
private void editCard() {
final Card card = getCurrentStateAsCard();
DB.updateCard(mCardId, card)
.setOnDoneListener(this::finish)
.setOnErrorListener(error -> {
if (error.getType() == RequestError.Type.AlreadyExists)
showCardAlreadyExistsErrorMessage();
else
showErrorMessage(error.getMessage());
})
.execute();
}
@SuppressLint("SetTextI18n")
private void onIncreaseDifficultyClicked() {
++ mDifficulty;
if (mDifficulty > Card.getMaxDifficulty())
mDifficulty = Card.getMaxDifficulty();
((TextView) findViewById(R.id.difficulty_view))
.setText(Integer.valueOf(mDifficulty).toString());
}
@SuppressLint("SetTextI18n")
private void onDecreaseDifficultyClicked() {
-- mDifficulty;
if (mDifficulty < Card.getMinDifficulty())
mDifficulty = Card.getMinDifficulty();
((TextView) findViewById(R.id.difficulty_view))
.setText(Integer.valueOf(mDifficulty).toString());
}
}
|
Fixed toggle visibility button in CardEditActivity.
|
app/src/main/java/com/divanoapps/learnwords/activities/CardEditActivity.java
|
Fixed toggle visibility button in CardEditActivity.
|
|
Java
|
mit
|
f0e38aa575ba0b2e2490845e80b77d30f9292006
| 0
|
armandgray/taapProject,armandgray/taapProject,armandgray/taapProject
|
package com.armandgray.taap.utils;
import android.annotation.SuppressLint;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.armandgray.taap.BuildConfig;
import com.armandgray.taap.R;
import com.armandgray.taap.models.SessionLog;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import static com.armandgray.taap.utils.SessionLogRvAdapter.IMAGE_RESOURCE_ID;
import static com.armandgray.taap.utils.SessionLogRvAdapter.ITEM_DATA;
import static com.armandgray.taap.utils.SessionLogRvAdapter.STRING_RESOURCE_ID;
import static com.armandgray.taap.utils.SessionLogRvAdapter.TYPE_HEADER;
import static com.armandgray.taap.utils.SessionLogRvAdapter.TYPE_ITEM;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
@RunWith(RobolectricTestRunner.class)
@Config(constants = BuildConfig.class)
public class SessionLogRvAdapterTest {
private SessionLogRvAdapter adapter;
private View mockView;
private SessionLog testSessionLog;
@Before
public void setUp() {
System.out.println("Running Set Up!");
adapter = new SessionLogRvAdapter(null);
mockView = mock(View.class);
testSessionLog = new SessionLog.Builder()
.sessionLength(new Date(1, 1, 1, 1, 5, 30))
.sessionGoal(new Date(0))
.activeWork(new Date(0))
.restTime(new Date(0))
.setsCompleted(0)
.repsCompleted(0)
.successRate(0.47)
.successRecord(0.0)
.create();
}
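// Note: the deprecated Date(int, int, int, int, int, int) constructor above takes
// year-1900 and a zero-based month, so new Date(1, 1, 1, 1, 5, 30) is 1901-02-01 01:05:30;
// only the 01:05:30 time-of-day part is relevant when the tests format sessionLength.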
@Test
public void doesImplementAdapter() throws Exception {
RecyclerView.Adapter<SessionLogRvAdapter.SessionLogViewHolder> adapter =
new SessionLogRvAdapter(testSessionLog);
assertNotNull(adapter);
}
@Test
public void onCreateViewHolder_ReturnsNewSessionLogViewHolderOfCorrectLayout() {
TestableRvSummaryAdapter testableAdapter = new TestableRvSummaryAdapter();
testableAdapter.setMockView(mockView);
SessionLogRvAdapter.SessionLogViewHolder sessionLogViewHolder = testableAdapter
.onCreateViewHolder(new FrameLayout(RuntimeEnvironment.application), 0);
assertEquals(mockView, sessionLogViewHolder.itemView);
}
static class TestableRvSummaryAdapter extends SessionLogRvAdapter {
View mockView;
void setMockView(View mockView) {
this.mockView = mockView;
}
@Override
View getLayout(ViewGroup parent) {
return mockView;
}
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogHeader() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogHeaderViewHolder(
inflater.inflate(R.layout.session_log_header_layout, null, false));
adapter.onBindViewHolder(holder, 0);
Date date = testSessionLog.getSessionDate();
String expectedDate = new SimpleDateFormat("EEE, MMM d, ''yy", Locale.US)
.format(date);
assertEquals(R.string.session_date, adapter.getItemAtPosition(0).get(STRING_RESOURCE_ID));
assertEquals(expectedDate, holder.tvText.getText());
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogItem() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogViewHolder(
inflater.inflate(R.layout.session_log_listitem, null, false));
adapter.onBindViewHolder(holder, 1);
Calendar calendar = Calendar.getInstance();
calendar.setTime(testSessionLog.getSessionLength());
int hour = calendar.get(Calendar.HOUR_OF_DAY);
SimpleDateFormat simpleDateFormat =
hour == 0
? new SimpleDateFormat("00:mm:ss", Locale.US)
: new SimpleDateFormat("hh:mm:ss", Locale.US);
assertEquals("Session Length", holder.tvHeader.getText());
assertEquals(simpleDateFormat.format(calendar.getTime()), holder.tvText.getText());
assertEquals(RuntimeEnvironment.application.getResources().getDrawable(
R.drawable.ic_timer_white_24dp),
holder.ivImage.getDrawable());
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogItem_Ints() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogViewHolder(
inflater.inflate(R.layout.session_log_listitem, null, false));
adapter.onBindViewHolder(holder, 5);
assertEquals("Sets Completed", holder.tvHeader.getText());
assertEquals(String.valueOf(testSessionLog.getSetsCompleted()), holder.tvText.getText());
assertEquals(RuntimeEnvironment.application.getResources().getDrawable(
R.drawable.ic_fitness_center_white_24dp),
holder.ivImage.getDrawable());
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogItem_Percents() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogViewHolder(
inflater.inflate(R.layout.session_log_listitem, null, false));
adapter.onBindViewHolder(holder, 7);
Double rate = testSessionLog.getSuccessRate() * 100;
assertEquals("Success Rate", holder.tvHeader.getText());
assertEquals(String.format(Locale.US, "%d%%", rate.intValue()), holder.tvText.getText());
assertEquals(RuntimeEnvironment.application.getResources().getDrawable(
R.drawable.ic_timer_white_24dp),
holder.ivImage.getDrawable());
}
@Test
public void canGetItemCount() throws Exception {
adapter = new SessionLogRvAdapter(testSessionLog);
assertEquals(9, adapter.getItemCount());
}
@Test
public void canGetItemAtPosition() throws Exception {
adapter = new SessionLogRvAdapter(testSessionLog);
HashMap<String, Object> hashMap = new HashMap<>();
hashMap.put(STRING_RESOURCE_ID, R.string.session_date);
hashMap.put(ITEM_DATA, testSessionLog.getSessionDate());
hashMap.put(IMAGE_RESOURCE_ID, R.drawable.ic_timer_white_24dp);
assertEquals(hashMap, adapter.getItemAtPosition(0));
}
@Test
public void canGetItemViewType() throws Exception {
adapter = new SessionLogRvAdapter(testSessionLog);
assertEquals(TYPE_HEADER, adapter.getItemViewType(0));
assertEquals(TYPE_ITEM, adapter.getItemViewType(1));
}
@After
public void tearDown() {
System.out.println("Running TearDown!");
adapter = null;
mockView = null;
testSessionLog = null;
}
}
|
TAAP/app/src/test/java/com/armandgray/taap/utils/SessionLogRvAdapterTest.java
|
package com.armandgray.taap.utils;
import android.annotation.SuppressLint;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import com.armandgray.taap.BuildConfig;
import com.armandgray.taap.R;
import com.armandgray.taap.models.SessionLog;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import static com.armandgray.taap.utils.SessionLogRvAdapter.IMAGE_RESOURCE_ID;
import static com.armandgray.taap.utils.SessionLogRvAdapter.ITEM_DATA;
import static com.armandgray.taap.utils.SessionLogRvAdapter.STRING_RESOURCE_ID;
import static com.armandgray.taap.utils.SessionLogRvAdapter.TYPE_HEADER;
import static com.armandgray.taap.utils.SessionLogRvAdapter.TYPE_ITEM;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
@RunWith(RobolectricTestRunner.class)
@Config(constants = BuildConfig.class)
public class SessionLogRvAdapterTest {
private SessionLogRvAdapter adapter;
private View mockView;
private SessionLog testSessionLog;
@Before
public void setUp() {
System.out.println("Running Set Up!");
adapter = new SessionLogRvAdapter(null);
mockView = mock(View.class);
testSessionLog = new SessionLog.Builder()
.sessionLength(new Date(1, 1, 1, 1, 5, 30))
.sessionGoal(new Date(0))
.activeWork(new Date(0))
.restTime(new Date(0))
.setsCompleted(0)
.repsCompleted(0)
.successRate(0.47)
.successRecord(0.0)
.create();
}
@Test
public void doesImplementAdapter() throws Exception {
RecyclerView.Adapter<SessionLogRvAdapter.SessionLogViewHolder> adapter =
new SessionLogRvAdapter(testSessionLog);
assertNotNull(adapter);
}
@Test
public void onCreateViewHolder_ReturnsNewSessionLogViewHolderOfCorrectLayout() {
TestableRvSummaryAdapter testableAdapter = new TestableRvSummaryAdapter();
testableAdapter.setMockView(mockView);
SessionLogRvAdapter.SessionLogViewHolder sessionLogViewHolder = testableAdapter
.onCreateViewHolder(new FrameLayout(RuntimeEnvironment.application), 0);
assertEquals(mockView, sessionLogViewHolder.itemView);
}
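// Test double that stubs getLayout() so onCreateViewHolder() returns the injected mock view instead of inflating a real item layout.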
static class TestableRvSummaryAdapter extends SessionLogRvAdapter {
View mockView;
void setMockView(View mockView) {
this.mockView = mockView;
}
@Override
View getLayout(ViewGroup parent) {
return mockView;
}
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogHeader() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogHeaderViewHolder(
inflater.inflate(R.layout.session_log_header_layout, null, false));
adapter.onBindViewHolder(holder, 0);
Date date = testSessionLog.getSessionDate();
String expectedDate = new SimpleDateFormat("EEE, MMM d, ''yy", Locale.US)
.format(date);
assertEquals(R.string.session_date, adapter.getItemAtPosition(0).get(STRING_RESOURCE_ID));
assertEquals(expectedDate, holder.tvText.getText());
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogItem() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogViewHolder(
inflater.inflate(R.layout.session_log_listitem, null, false));
adapter.onBindViewHolder(holder, 1);
Date date = testSessionLog.getSessionLength();
SimpleDateFormat simpleDateFormat =
date.equals(new Date(0))
? new SimpleDateFormat("00:00:00", Locale.US)
: new SimpleDateFormat("hh:mm:ss", Locale.US);
assertEquals("Session Length", holder.tvHeader.getText());
assertEquals(simpleDateFormat.format(date), holder.tvText.getText());
assertEquals(RuntimeEnvironment.application.getResources().getDrawable(
R.drawable.ic_timer_white_24dp),
holder.ivImage.getDrawable());
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogItem_Ints() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogViewHolder(
inflater.inflate(R.layout.session_log_listitem, null, false));
adapter.onBindViewHolder(holder, 5);
assertEquals("Sets Completed", holder.tvHeader.getText());
assertEquals(String.valueOf(testSessionLog.getSetsCompleted()), holder.tvText.getText());
assertEquals(RuntimeEnvironment.application.getResources().getDrawable(
R.drawable.ic_fitness_center_white_24dp),
holder.ivImage.getDrawable());
}
@SuppressLint("InflateParams")
@Test
public void onBindViewHolder_DoesSetViewsForSessionLogItem_Percents() {
adapter = new SessionLogRvAdapter(testSessionLog);
LayoutInflater inflater = (LayoutInflater) RuntimeEnvironment.application
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
SessionLogRvAdapter.SessionLogViewHolder holder =
new SessionLogRvAdapter.SessionLogViewHolder(
inflater.inflate(R.layout.session_log_listitem, null, false));
adapter.onBindViewHolder(holder, 7);
Double rate = testSessionLog.getSuccessRate() * 100;
assertEquals("Success Rate", holder.tvHeader.getText());
assertEquals(String.format(Locale.US, "%d%%", rate.intValue()), holder.tvText.getText());
assertEquals(RuntimeEnvironment.application.getResources().getDrawable(
R.drawable.ic_timer_white_24dp),
holder.ivImage.getDrawable());
}
@Test
public void canGetItemCount() throws Exception {
adapter = new SessionLogRvAdapter(testSessionLog);
assertEquals(9, adapter.getItemCount());
}
@Test
public void canGetItemAtPosition() throws Exception {
adapter = new SessionLogRvAdapter(testSessionLog);
HashMap<String, Object> hashMap = new HashMap<>();
hashMap.put(STRING_RESOURCE_ID, R.string.session_date);
hashMap.put(ITEM_DATA, testSessionLog.getSessionDate());
hashMap.put(IMAGE_RESOURCE_ID, R.drawable.ic_timer_white_24dp);
assertEquals(hashMap, adapter.getItemAtPosition(0));
}
@Test
public void canGetItemViewType() throws Exception {
adapter = new SessionLogRvAdapter(testSessionLog);
assertEquals(TYPE_HEADER , adapter.getItemViewType(0));
assertEquals(TYPE_ITEM , adapter.getItemViewType(1));
}
@After
public void tearDown() {
System.out.println("Running TearDown!");
adapter = null;
mockView = null;
testSessionLog = null;
}
}
|
adjusted code for onBindViewHolder_DoesSetViewsForSessionLogItem to match formatting
|
TAAP/app/src/test/java/com/armandgray/taap/utils/SessionLogRvAdapterTest.java
|
adjusted code for onBindViewHolder_DoesSetViewsForSessionLogItem to match formatting
|
|
Java
|
mit
|
3db6d812a87cb8c92d32d80168e15bdeb7e7daa4
| 0
|
tobiatesan/serleena-android,tobiatesan/serleena-android
|
///////////////////////////////////////////////////////////////////////////////
//
// This file is part of Serleena.
//
// The MIT License (MIT)
//
// Copyright (C) 2015 Antonio Cavestro, Gabriele Pozzan, Matteo Lisotto,
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
///////////////////////////////////////////////////////////////////////////////
/**
* Name: SerleenaActivity
* Package: com.kyloth.serleena.activity
* Author: Filippo Sestini
*
* History:
* Version Programmer Changes
* 1.0.0 Filippo Sestini Creazione del file, scrittura del codice e di
* Javadoc
*/
package com.kyloth.serleena.activity;
import android.app.Fragment;
import android.os.Bundle;
import android.app.Activity;
import android.view.KeyEvent;
import com.kyloth.serleena.R;
import com.kyloth.serleena.model.*;
import com.kyloth.serleena.persistence.IPersistenceDataSink;
import com.kyloth.serleena.persistence.IPersistenceDataSource;
import com.kyloth.serleena.persistence.sqlite.CachedSQLiteDataSource;
import com.kyloth.serleena.persistence.sqlite.SerleenaDatabase;
import com.kyloth.serleena.persistence.sqlite.SerleenaSQLiteDataSink;
import com.kyloth.serleena.persistence.sqlite.SerleenaSQLiteDataSource;
import com.kyloth.serleena.presentation.*;
import com.kyloth.serleena.presenters.*;
import com.kyloth.serleena.sensors.*;
import com.kyloth.serleena.synchronization.KylothCloudSynchronizer;
import com.kyloth.serleena.synchronization.net.INetProxy;
import com.kyloth.serleena.synchronization.net.SerleenaJSONNetProxy;
import com.kyloth.serleena.synchronization.kylothcloud.LocalEnvKylothIdSource;
import com.kyloth.serleena.view.fragments.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
/**
* Classe che implementa ISerleenaActivity.
*
* Rappresenta l'unica Activity dell'applicazione, il punto di accesso
* principale alle sue risorse. Si occupa di creare le viste e i presenter, e
* agganciarli le une agli altri.
*
* @use Ogni presenter dell'applicazione mantiene un riferimento all'Activity dietro interfaccia ISerleenaActivity.
* @field application : ISerleenaApplication Applicazione serleena
* @field trackFragment : TrackFragment Visuale Percorso
* @field compassFragment : CompassFragment Schermata Bussola
* @field contactsFragment : ContactsFragment Schermata Autorita` locali
* @field telemetryFragment : TelemetryFragment Visuale Tracciamento
* @field weatherFragment : WeatherFragment Schermata Meteo
* @field mapFragment : MapFragment Visuale Mappa
* @field experienceSelectionFragment : ExperienceSelectionFragment Imposta Esperienza
* @field trackSelectionFragment : TrackSelectionFragment Visuale Imposta Percorso
* @field menuFragment : ObjectListFragment Schermata menu` principale
* @field experienceFragment : ObjectListFragment Schermata Esperienza
* @field syncFragment : SyncFragment Schermata Sincronizza
* @author Filippo Sestini <valle.sebastiano93@gmail.com>
* @version 1.0.0
* @see android.support.v7.app.AppCompatActivity
*/
public class SerleenaActivity extends Activity
implements ISerleenaActivity, IObjectListObserver {
private ISerleenaApplication application;
private TrackFragment trackFragment;
private CompassFragment compassFragment;
private ContactsFragment contactsFragment;
private TelemetryFragment telemetryFragment;
private WeatherFragment weatherFragment;
private MapFragment mapFragment;
private ExperienceSelectionFragment experienceSelectionFragment;
private TrackSelectionFragment trackSelectionFragment;
private ObjectListFragment menuFragment;
private ObjectListFragment experienceFragment;
private SyncFragment syncFragment;
/**
* Ridefinisce Activity.onCreate().
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_serleena);
application = (ISerleenaApplication) getApplication();
if (findViewById(R.id.main_container) != null) {
if (savedInstanceState != null)
return;
initFragments();
initPresenters();
getFragmentManager().beginTransaction()
.add(R.id.main_container, menuFragment).commit();
}
}
/**
* Ridefinisce Activity.onDestroy()
*
* Annulla l'attraversamento del Percorso alla chiusura dell'Activity,
* evitando che risorse in background rimangano attive anche ad applicazione
* terminata.
*/
@Override
protected void onDestroy() {
super.onDestroy();
getSensorManager().getTrackCrossingManager().abort();
}
/**
* Ridefinisce Activity.onKeyDown().
*/
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
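// Pressing the hardware MENU key always brings the main menu fragment back to the foreground.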
if (keyCode == KeyEvent.KEYCODE_MENU) {
getFragmentManager().beginTransaction()
.replace(R.id.main_container, menuFragment).commit();
return true;
}
return super.onKeyDown(keyCode, event);
}
/**
* Implementa ISerleenaActivity.getDataSource().
*
* Inoltra l'oggetto ISerleenaDataSource restituito dall'applicazione
* ISerleenaApplication.
*/
@Override
public ISerleenaDataSource getDataSource() {
return application.getDataSource();
}
/**
* Implementa ISerleenaActivity.getSensorManager().
*
* Inoltra l'oggetto ISensorManager restituito dall'applicazione
* ISerleenaApplication.
*/
@Override
public ISensorManager getSensorManager() {
return application.getSensorManager();
}
/**
* Implementa ISerleenaActivity.getDataSink().
*
* Inoltra l'oggetto IPersistenceDataSink restituito dall'applicazione
* ISerleenaApplication.
*/
@Override
public IPersistenceDataSink getDataSink() {
return application.getDataSink();
}
/**
* Implementa IObjectListObserver.onObjectSelected().
*
* Riceve eventi di selezione da parte delle schermate Esperienza e dal
* menu principale. Il Fragment selezionato viene visualizzato in primo
* piano.
*
* @param obj Oggetto selezionato.
*/
@Override
public void onObjectSelected(Object obj) {
Fragment f = (Fragment) obj;
getFragmentManager().beginTransaction()
.replace(R.id.main_container, f).addToBackStack("fragment")
.commit();
}
/**
* Inizializza i Fragment.
*/
private void initFragments() {
trackFragment = new TrackFragment();
compassFragment = new CompassFragment();
contactsFragment = new ContactsFragment();
experienceSelectionFragment = new ExperienceSelectionFragment();
trackSelectionFragment = new TrackSelectionFragment();
telemetryFragment = new TelemetryFragment();
weatherFragment = new WeatherFragment();
mapFragment = new MapFragment();
syncFragment = new SyncFragment();
experienceFragment = new ObjectListFragment() {
@Override
public String toString() {
return "Esperienza";
}
};
menuFragment = new ObjectListFragment();
ArrayList<Object> expList = new ArrayList<>();
expList.add(telemetryFragment);
expList.add(mapFragment);
expList.add(experienceSelectionFragment);
expList.add(trackSelectionFragment);
expList.add(trackFragment);
experienceFragment.setList(expList);
experienceFragment.attachObserver(this);
ArrayList<Object> menuList = new ArrayList<>();
menuList.add(experienceFragment);
menuList.add(weatherFragment);
menuList.add(contactsFragment);
menuList.add(compassFragment);
menuList.add(syncFragment);
menuList.add(new QuitFragment());
menuFragment.setList(menuList);
menuFragment.attachObserver(this);
}
/**
* Inizializza i Presenter.
*/
private void initPresenters() {
new CompassPresenter(compassFragment, this);
new ContactsPresenter(contactsFragment, this);
ExperienceSelectionPresenter esp =
new ExperienceSelectionPresenter(
experienceSelectionFragment, this);
new MapPresenter(mapFragment, this, esp);
new TrackSelectionPresenter(trackSelectionFragment, this, esp);
new WeatherPresenter(weatherFragment, this);
new TrackPresenter(trackFragment, this);
new TelemetryPresenter(telemetryFragment, this);
new SyncPresenter(syncFragment, this);
}
}
|
serleena/app/src/main/java/com/kyloth/serleena/activity/SerleenaActivity.java
|
///////////////////////////////////////////////////////////////////////////////
//
// This file is part of Serleena.
//
// The MIT License (MIT)
//
// Copyright (C) 2015 Antonio Cavestro, Gabriele Pozzan, Matteo Lisotto,
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
///////////////////////////////////////////////////////////////////////////////
/**
* Name: SerleenaActivity
* Package: com.kyloth.serleena.activity
* Author: Filippo Sestini
*
* History:
* Version Programmer Changes
* 1.0.0 Filippo Sestini Creazione del file, scrittura del codice e di
* Javadoc
*/
package com.kyloth.serleena.activity;
import android.app.Fragment;
import android.os.Bundle;
import android.app.Activity;
import android.view.KeyEvent;
import com.kyloth.serleena.R;
import com.kyloth.serleena.model.*;
import com.kyloth.serleena.persistence.IPersistenceDataSink;
import com.kyloth.serleena.persistence.IPersistenceDataSource;
import com.kyloth.serleena.persistence.sqlite.CachedSQLiteDataSource;
import com.kyloth.serleena.persistence.sqlite.SerleenaDatabase;
import com.kyloth.serleena.persistence.sqlite.SerleenaSQLiteDataSink;
import com.kyloth.serleena.persistence.sqlite.SerleenaSQLiteDataSource;
import com.kyloth.serleena.presentation.*;
import com.kyloth.serleena.presenters.*;
import com.kyloth.serleena.sensors.*;
import com.kyloth.serleena.synchronization.KylothCloudSynchronizer;
import com.kyloth.serleena.synchronization.net.INetProxy;
import com.kyloth.serleena.synchronization.net.SerleenaJSONNetProxy;
import com.kyloth.serleena.synchronization.kylothcloud.LocalEnvKylothIdSource;
import com.kyloth.serleena.view.fragments.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
/**
* Classe che implementa ISerleenaActivity.
*
* In questa visuale è possibile selezionare un'esperienza da attivare tra quelle disponibili.
*
* @use Viene utilizzata solamente dall'Activity, che ne mantiene un riferimento. Il Presenter, alla creazione, si registra alla sua Vista, passando se stesso come parametro dietro interfaccia.
* @field dataSource : sorgente dati utilizzata dall'activity e dai suoi presenter
* @field sensorManager : gestore dei sensori utilizzato dall'activity e dai suoi presenter
* @author Filippo Sestini <valle.sebastiano93@gmail.com>
* @version 1.0.0
* @see android.support.v7.app.AppCompatActivity
*/
public class SerleenaActivity extends Activity
implements ISerleenaActivity, IObjectListObserver {
private ISerleenaApplication application;
private TrackFragment trackFragment;
private CompassFragment compassFragment;
private ContactsFragment contactsFragment;
private TelemetryFragment telemetryFragment;
private WeatherFragment weatherFragment;
private MapFragment mapFragment;
private ExperienceSelectionFragment experienceSelectionFragment;
private TrackSelectionFragment trackSelectionFragment;
private ObjectListFragment menuFragment;
private ObjectListFragment experienceFragment;
private SyncFragment syncFragment;
/**
* Ridefinisce Activity.onCreate().
*/
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_serleena);
application = (ISerleenaApplication) getApplication();
if (findViewById(R.id.main_container) != null) {
if (savedInstanceState != null)
return;
initFragments();
initPresenters();
getFragmentManager().beginTransaction()
.add(R.id.main_container, menuFragment).commit();
}
}
/**
* Ridefinisce Activity.onDestroy()
*
* Annulla l'attraversamento del Percorso alla chiusura dell'Activity,
* evitando che risorse in background rimangano attive anche ad applicazione
* terminata.
*/
@Override
protected void onDestroy() {
super.onDestroy();
getSensorManager().getTrackCrossingManager().abort();
}
/**
* Ridefinisce Activity.onKeyDown().
*/
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
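// Pressing the hardware MENU key always brings the main menu fragment back to the foreground.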
if (keyCode == KeyEvent.KEYCODE_MENU) {
getFragmentManager().beginTransaction()
.replace(R.id.main_container, menuFragment).commit();
return true;
}
return super.onKeyDown(keyCode, event);
}
/**
* Implementa ISerleenaActivity.getDataSource().
*/
@Override
public ISerleenaDataSource getDataSource() {
return application.getDataSource();
}
/**
* Implementa ISerleenaActivity.getSensorManager().
*/
@Override
public ISensorManager getSensorManager() {
return application.getSensorManager();
}
/**
* Implementa ISerleenaActivity.getDataSink().
*/
@Override
public IPersistenceDataSink getDataSink() {
return application.getDataSink();
}
/**
* Implementa IObjectListObserver.onObjectSelected().
*
* @param obj Oggetto selezionato.
*/
@Override
public void onObjectSelected(Object obj) {
Fragment f = (Fragment) obj;
getFragmentManager().beginTransaction()
.replace(R.id.main_container, f).addToBackStack("fragment")
.commit();
}
private void initFragments() {
trackFragment = new TrackFragment();
compassFragment = new CompassFragment();
contactsFragment = new ContactsFragment();
experienceSelectionFragment = new ExperienceSelectionFragment();
trackSelectionFragment = new TrackSelectionFragment();
telemetryFragment = new TelemetryFragment();
weatherFragment = new WeatherFragment();
mapFragment = new MapFragment();
syncFragment = new SyncFragment();
experienceFragment = new ObjectListFragment() {
@Override
public String toString() {
return "Esperienza";
}
};
menuFragment = new ObjectListFragment();
ArrayList<Object> expList = new ArrayList<>();
expList.add(telemetryFragment);
expList.add(mapFragment);
expList.add(experienceSelectionFragment);
expList.add(trackSelectionFragment);
expList.add(trackFragment);
experienceFragment.setList(expList);
experienceFragment.attachObserver(this);
ArrayList<Object> menuList = new ArrayList<>();
menuList.add(experienceFragment);
menuList.add(weatherFragment);
menuList.add(contactsFragment);
menuList.add(compassFragment);
menuList.add(syncFragment);
menuList.add(new QuitFragment());
menuFragment.setList(menuList);
menuFragment.attachObserver(this);
}
private void initPresenters() {
new CompassPresenter(compassFragment, this);
new ContactsPresenter(contactsFragment, this);
ExperienceSelectionPresenter esp =
new ExperienceSelectionPresenter(
experienceSelectionFragment, this);
new MapPresenter(mapFragment, this, esp);
new TrackSelectionPresenter(trackSelectionFragment, this, esp);
new WeatherPresenter(weatherFragment, this);
new TrackPresenter(trackFragment, this);
new TelemetryPresenter(telemetryFragment, this);
new SyncPresenter(syncFragment, this);
}
}
|
ACT: Aggiorna Javadoc SerleenaActivity
|
serleena/app/src/main/java/com/kyloth/serleena/activity/SerleenaActivity.java
|
ACT: Aggiorna Javadoc SerleenaActivity
|
|
Java
|
mit
|
a52334a3f3af1cedbf31a94904f7fe7409d43a44
| 0
|
dbsoftcombr/dbssdk
|
package br.com.dbsoft.io;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.faces.context.FacesContext;
import javax.faces.event.PhaseId;
import javax.faces.model.DataModelEvent;
import javax.faces.model.DataModelListener;
import javax.servlet.jsp.jstl.sql.Result;
import javax.servlet.jsp.jstl.sql.ResultSupport;
import br.com.dbsoft.annotation.DBSTableModel;
import br.com.dbsoft.core.DBSSDK.IO.DATATYPE;
import br.com.dbsoft.error.DBSIOException;
import br.com.dbsoft.message.DBSMessage;
import br.com.dbsoft.util.DBSIO;
import br.com.dbsoft.util.DBSIO.MOVE_DIRECTION;
import br.com.dbsoft.util.DBSObject;
import br.com.dbsoft.util.DBSString;
/**
* @param <DataModelClass> Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.<br/>
* É necessário também passar esta classe no construtor.
*/
/**
* @author ricardo.villar
*
* @param <DataModelClass>
*/
public class DBSDAO<DataModelClass> extends DBSDAOBase<DataModelClass> {
private static final long serialVersionUID = -3273102169619413848L;
public static enum COMMAND{
INSERT,
UPDATE,
DELETE,
SELECT,
LOCK;
}
//#########################################################################################################
//## Working Variables #
//#########################################################################################################
private Connection wConnection;
private String wQuerySQL = "";
//private ResultSet wSelectResultSet;
private String wUK = ""; //Colunas da pesquisa, separada por virgula, que será usadas para retornar ao mesmo registro em caso de refresh
private String wPK = "";
private String[] wPKs = new String[]{};
private String[] wUKs = new String[]{};
private ResultSetMetaData wQueryResultSetMetaData;
private String wQuerySQLUK=""; //Query SQL alterada a partir da wsQuerySql, para conter a informação necessários para o controle dos registros
private DBSRow wQueryColumns = new DBSRow(); //Colunas existente na pesquisa
private DBSRow wCommandColumns = new DBSRow(); //Colunas que sofreráo modificação de dados
private String wCommandTableName = ""; //Nome da tabela que sofrerá modificação de dados
private boolean wAutoIncrementPK = true; //Efetua o insert e recuperar o valores do campos com autoincrement;
private DBSResultDataModel wResultDataModel;
private int wCurrentRowIndex = -1;
private boolean wIsMerging = false;
private DataModelListener wDataModelListener = new DataModelListener(){
@Override
public void rowSelected(DataModelEvent pEvent) {
if (FacesContext.getCurrentInstance() == null){return;}
PhaseId xId = FacesContext.getCurrentInstance().getCurrentPhaseId();
// System.out.println("PahseId:" + xId.toString());
// System.out.println("PahseId:" + getQuerySQL());
// System.out.println("PahseId:" + FacesContext.getCurrentInstance().getPartialViewContext().isAjaxRequest() + ":" + xId.toString() + "[" + wCurrentRowIndex + ":" + pEvent.getRowIndex() + "]");
// if ((!xId.equals(PhaseId.RENDER_RESPONSE) && !xId.equals(PhaseId.INVOKE_APPLICATION)) || pEvent.getRowIndex() != -1){
// }
if (xId == null
// || (!xId.equals(PhaseId.RENDER_RESPONSE) && !xId.equals(PhaseId.INVOKE_APPLICATION)) || pEvent.getRowIndex() != -1) {
|| (xId.equals(PhaseId.INVOKE_APPLICATION) && pEvent.getRowIndex() != -1)){
try {
pvCopyValueFromResultDataModel(pEvent.getRowIndex());
} catch (DBSIOException e) {
wLogger.error(e);
}
}
// if (xId == null
// || ((xId.equals(PhaseId.RENDER_RESPONSE) || xId.equals(PhaseId.INVOKE_APPLICATION)) && pEvent.getRowIndex() != -1)){
// try {
// synchronize();
// } catch (DBSIOException e) {
// wLogger.error(e);
// }
// }
// if (xId == null
// || (pEvent.getRowIndex() != -1 &&
// ((xId.equals(PhaseId.INVOKE_APPLICATION) && pEvent.getRowIndex() != -1) || xId.equals(PhaseId.RENDER_RESPONSE)))){
// setCurrentRowIndex(pEvent.getRowIndex());
// }else{
// System.out.println("stop");
// }
}
};
//private boolean wHasVersionControl=false;
//#########################################################################################################
//## Public Properties #
//#########################################################################################################
/**
* Cria novo DAO.
* @param pConnection Conexão com o banco de dados
*/
public DBSDAO(Connection pConnection) {
this.setConnection(pConnection);
}
/**
* Cria novo DAO.
* @param pDataModel Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.<br/>
* @param pConnection
* @throws DBSIOException
*/
public DBSDAO(Class<DataModelClass> pDataModelClass, Connection pConnection) throws DBSIOException {
super(pDataModelClass);
this.setConnection(pConnection);
//Recupera nome da tabela, através da anotação @DataModel, caso exista na classe informada
DBSTableModel xAnnotation = DBSIO.getAnnotationDataModel(wDataModel);
if (xAnnotation!=null){
this.setCommandTableName(xAnnotation.tablename());
}
}
/**
* Cria novo DAO.
* @param pConnection Conexão com o banco de dados
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* O nome deverá ser o nome exato que está no banco de dados.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.<br/>
* Caso também esteja sendo executada uma <i>query</i> via <b>open</b>, está tabela <b>não</b> poderá ter um <i>alias</i>.
* @throws SQLException
*/
public DBSDAO(Connection pConnection, String pCommandTableName) throws DBSIOException{
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName);
}
/**
* Cria novo DAO.
* @param pDataModelClass Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.
* @param pConnection Conexão com o banco de dados.
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.
* @throws SQLException
*/
public DBSDAO(Class<DataModelClass> pDataModelClass, Connection pConnection, String pCommandTableName) throws DBSIOException{
super(pDataModelClass);
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName);
}
/**
* Cria novo DAO.
* @param pConnection Conexão com o banco de dados.
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.
* @param pPK Nomes das colunas que serão utilizadas para identificar se é um registro único separadas por virgula(,).<br/>
* Esta informação precisa ser passada caso as colunas que são PK não estejam configuradas no banco de dados como tal.
* @throws SQLException
*/
public DBSDAO(Connection pConnection, String pCommandTableName, String pPK) throws DBSIOException{
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName, pPK);
}
/**
* Cria novo DAO.
* @param pDataModelClass Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.
* @param pConnection Conexão com o banco de dados.
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.
* @param pPK Nomes das colunas que serão utilizadas para identificar se é um registro único, separadas por virgula(,).
* Esta informação precisa ser passada caso as colunas que são PK não estejam configuradas no banco de dados como tal.
* @throws SQLException
*/
public DBSDAO(Class<DataModelClass> pDataModelClass, Connection pConnection, String pCommandTableName, String pPK) throws DBSIOException{
super(pDataModelClass);
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName, pPK);
}
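/*
 * Typical usage sketch (illustrative only; the connection, table and column names below are hypothetical):
 *
 *   DBSDAO<Object> xDAO = new DBSDAO<Object>(xConnection, "USUARIO", "ID");
 *   if (xDAO.open("SELECT * FROM USUARIO")) {
 *       xDAO.moveBeforeFirstRow();
 *       while (xDAO.moveNextRow()) {
 *           String xNome = xDAO.getValue("NOME");
 *       }
 *   }
 *   xDAO.close();
 */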
/**
* Retorna a Conexão.
* @param pConnection Conexão com o banco de dados
*/
public final void setConnection(Connection pConnection) {
if (!DBSObject.isEmpty(pConnection)){
this.wConnection = pConnection;
}
}
/**
* Configura a Conexão.
* @return Conexão com o banco
*/
public final Connection getConnection() {
return wConnection;
}
/**
* Retorna comando SQL utilizado para efetuar a pesquisa.
* @return Comando Sql utilizado
*/
public final String getQuerySQL() {
return wQuerySQL;
}
/**
* Configura o comando SQL utilizado para pesquisa.<br/>
* @param pQuerySQL
*/
public final void setQuerySQL(String pQuerySQL) {
wQuerySQL = pQuerySQL;
}
/**
* @return Quantidade de colunas da pesquisa.
* @throws SQLException
*/
public final int getQueryColumnsCount() throws SQLException{
return wQueryResultSetMetaData.getColumnCount();
}
/**
* Retorna o total de linhas da Query no caso de ter efetuado uma pesquisa via <b>open</b>.<br/>
* Retorna o total de linhas da tabela no caso de <b>não</b> ter efetuado uma pesquisa via <b>open</b>, mas ter definido a <b>CommandTableName</b>.
* @return Quantidade.
* @throws DBSIOException
*/
@Override
public final int getRowsCount() throws DBSIOException{
if (wResultDataModel == null){
if (DBSObject.isEmpty(this.wQuerySQL)
&& !DBSObject.isEmpty(this.wCommandTableName)){
return DBSIO.getTableRowsCount(this.wConnection, this.wCommandTableName);
}
return 0;
}else{
return wResultDataModel.getRowCount();
}
// Comentado em 3/dez/2013 - Aparentemente, código abaixo era desnecessário. - Ricardo
// try {
// if (!DBSObject.isEmpty(this.wQuerySQL)){
// return DBSIO.getSQLRowsCount(this.wConnection, this.wQuerySQL);
// }else if (!DBSObject.isEmpty(this.wCommandTableName)){
// return DBSIO.getTableRowsCount(this.wConnection, this.wCommandTableName);
// }else{
// return 0;
// }
// } catch (DBSIOException e) {
// wLogger.error("getRowsCount", e);
// return 0;
// }
}
/**
* Retorna a indice do registro corrente.
* Caso não haja registro, retorna -1.
* @return
*/
public final int getCurrentRowIndex(){
if (getResultDataModel()==null){
wCurrentRowIndex = -1;
}
return wCurrentRowIndex; //getResultDataModel().getRowIndex();
}
/**
* Seta o registro corrente a partir o indice informado.<br/>
* Caso indice seja maior que o existente, posiciona no último.<br/>
* Caso não existam registros, posiciona no 'anterior ao primeiro'(-1).
* @param pRowIndex
* @throws DBSIOException
*/
public final boolean setCurrentRowIndex(int pRowIndex) throws DBSIOException{
if (getResultDataModel()==null){
return false;
}
boolean xOk = true;
//Posiciona no último registro caso o valor informado seja maior a quantidade de registros existentes
if ((pRowIndex + 1) > getResultDataModel().getRowCount()){
pRowIndex = getResultDataModel().getRowCount() - 1;
xOk = false;
}
//Posiciona no registro anterior ao primeiro caso o valor informado seja inferior a -1
if (pRowIndex < -1){
pRowIndex = - 1;
xOk = false;
}
//Indica qual o registro corrente a partir o indice informado.
//Este comando dispara automativamente o evento rowSelected, que por sua vez chama pvSetRowPositionChanged,
//onde os valores são recupedados do resultset e copiados para as variáveis locais
if (pRowIndex != getResultDataModel().getRowIndex()){
getResultDataModel().setRowIndex(pRowIndex);
}
pvCopyValueFromResultDataModel(pRowIndex);
return xOk;
}
/**
* Retorna se o registro atual é um novo registro.<br/>
* Os dados deste registro existem somente em memória, sendo necessário implementar a rotina para salva-los.
* @return
*/
public final boolean getIsNewRow(){
if (wResultDataModel == null){
return false;
}
if (wResultDataModel.getRowIndex() > (getRowsCountAfterRefresh() - 1)){
return true;
}
return false;
}
public boolean isMerging() {
return wIsMerging;
}
public void setMerging(boolean pMerging) {
wIsMerging = pMerging;
}
/**
* Nome da coluna na tabela ou alias(as) atribuido no select.
* @param pColumnIndex númeroda coluna que se deseja saber o nome
* @return Nome da coluna
* @throws SQLException
*/
public final String getQueryColumnName(Integer pColumnIndex) throws SQLException{
return wQueryResultSetMetaData.getColumnName(pColumnIndex);
}
/**
* Retorna todas as colunas da query.
* @return
*/
@Override
public final Collection<DBSColumn> getColumns() {
return wQueryColumns.getColumns();
}
/**
* Retorna coluna a partir do nome informado.<br/>
* Caso a coluna não exista na query,
* será pesquisado também na tabela principal que sofrerá a edição(se houver).
* @param pColumnName
* @return
*/
@Override
public final DBSColumn getColumn(String pColumnName) {
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.getColumn(xColumnName);
}else if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.getColumn(xColumnName);
}
wLogger.error("getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "][" + wCommandTableName + "]");
return null;
}
/**
* retorna a coluna a partir no número informado
* @return
*/
@Override
public final DBSColumn getColumn(int pColumnIndex) {
return wQueryColumns.getColumn(pColumnIndex);
}
/**
* @return Todas as colunas da tabela que sofrerá modificação.
*/
public final Collection<DBSColumn> getCommandColumns() {
return wCommandColumns.getColumns();
}
/**
* Retorna coluna da tabela.
* @param pColumnName Nome da coluna que se deseja os dados
* @return Coluna ou null se não for encontrada
*/
public final DBSColumn getCommandColumn(String pColumnName){
if (pColumnName==null){return null;}
pColumnName = pvGetColumnName(pColumnName);
return wCommandColumns.getColumn(pvGetColumnName(pColumnName));
}
/**
* @return Noma da tabela que sofrerá a modificação
*/
public final String getCommandTableName() {
return wCommandTableName;
}
/**
* Configura o nome da tabela que sofrerá as modificações(<b>INSERT, UPDATE, DELETE</b>).<br/>
* Não é necessário efetuar uma <i>query</i> via <i>Open</i> para efetuar as modificações.
* No entando, os dados recuperados pela query, serão aproveitados para efeito das modificações,
* facilitando, por exemplo, um <b>Update</b> no registro corrente.
* @param pCommandTableName Nome da tabela.
* @throws SQLException
*/
public final void setCommandTableName(String pCommandTableName) throws DBSIOException{
setCommandTableName(pCommandTableName, "");
}
/**
* Configura o nome da tabela que sofrerá as modificações.<br/>
* Não é necessário efetuar uma <i>query</i> via <i>Open</i> para efetuar as modificações.
* No entando, os dados recuperados pela query, serão aproveitados para efeito das modificações,
* facilitando, por exemplo, um <b>Update</b> no registro corrente.
* @param pCommandTableName Nome da tabela que sofrerá a modificação
* @param pPK Nomes da colunas que vão representar a chave primária, caso queira forçar ou definir uma PK, mesmo que não exista na tabela.
* @throws DBSIOException
*/
public final void setCommandTableName(String pCommandTableName, String pPK) throws DBSIOException{
if (!DBSObject.isEmpty(pCommandTableName) && //Se nome não for vazio
!wCommandTableName.equals(pCommandTableName.trim())){//Se nome da table for diferente da anterior
if (wQueryColumns.size() > 0){
wLogger.error("DBSDAO: CommandTableName deve ser configurada ANTES de efetuar o Open() ou no momento da criação da nova instância do DAO.");
}
this.wCommandTableName = pCommandTableName.trim();
pvSetPK(pPK);
pvCreateCommandColumns();
}
}
/**
* Retorna nome das colunas que identificam a chave primária dos resgistros
* da tabela pricipal que sofrerá a edição, conforme definição da <b>commandTableName</b>.<br/>
* No caso de haver mais de uma coluna como PK, os nomes das colunas serão separados por vírgula.
* @return
*/
public final String getPK(){
return wPK;
}
/**
* Retorna uma string contendo os nomes das colunas que formam o UK que será responsável
* por identificar um linha única, podente haver colunas de mais de uma tabela ou <b>alias</b>.<br/>
* No caso de haver mais de uma coluna como UK, os nomes das colunas serão separados por vírgula.
* @return
*/
@Override
public final String getUK(){
return wUK;
}
/**
* Retorna valor da UK assumunindo que há somente uma coluna.<br/>
* Coluna pode ser um <b>alias</b> de mais de uma coluna.
* @return
*/
@Override
public final Object getUKValue(){
return this.getValue(UKName);
}
/**
* Indica se a coluna que é PK é de auto-incremento. O padrão é TRUE.
* Se a tabela possuir mais de uma coluna como PK, o padrão passa a ser FALSE.
* @return
*/
public final boolean isAutoIncrementPK() {
return wAutoIncrementPK;
}
/**
* Indica se a coluna que é PK é de auto-incremento.
* @param pAutoIncrementPK
*/
public final void setAutoIncrementPK(boolean pAutoIncrementPK) {
this.wAutoIncrementPK = pAutoIncrementPK;
}
/**
* Retorna o valor da coluna antes de alteração
* @param pColumnName
* @return
*/
public final <A> A getValueOriginal(String pColumnName){
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.<A>getValueOriginal(xColumnName);
}else if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.<A>getValueOriginal(xColumnName);
}
if (getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
return null;
}
// public static <A> A getValueX(String pColumnName){
// Double x = 0D;
// return (A) x;
// }
//
/**
* Retorna valor da coluna
* @param pColumnName Nome da coluna
* @return Valor
*/
@Override
public final <A> A getValue(String pColumnName){
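// Lookup order: local DataModel field first, then the command-table columns, then the query columns.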
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
//Retorna valor do DataModel se existir
if (wDataModel != null){
A xValue = pvGetLocalDataModelValue(xColumnName);
//Retorna valor se não for nulo.
if (xValue != null){
return xValue;
}else{
//Verifica se o campo existe no dataModel. Se não existir, tentará encontrar a coluna no wCommandColumn ou wSelectColumn abaixo
Field xField = DBSIO.getDataModelField(wDataModel, xColumnName);
if (xField!=null){
return null;
}
}
}
//Retorna valor a parti do controle local das colunas de comando
if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.<A>getValue(xColumnName);
//Retorna valor a parti do controle local das colunas de pesquisa
}else if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.<A>getValue(xColumnName);
}
if (getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
return null;
}
/**
* Retorna o valor da coluna convertida para a classe do tipo informado
* @param pColumnName Nome da coluna
* @param pValueClass Classe para a qual será convertido o valor recebido
* @return
*/
public final <A> A getValue(String pColumnName, Class<A> pValueClass){
return DBSObject.<A>toClass(getValue(pColumnName), pValueClass);
}
@Override
public final void setValue(String pColumnName, Object pValue){
this.setValue(pColumnName, pValue, false);
}
@Override
public final void setValue(String pColumnName, Object pValue, boolean pOriginalValue){
if (pColumnName==null){return;}
boolean xAchou = false;
String xColumnName = pvGetColumnName(pColumnName);
//Seta o valor na coluna da tabela que poderá sofrer alteração
if (wCommandColumns.containsKey(xColumnName)){
wCommandColumns.setValue(xColumnName, pValue, pOriginalValue);
xAchou = true;
}
//Seta o valor na coluna do select, independentemente de ser uma coluna que da tabela comando(CommandTableName)
if (wQueryColumns.containsKey(xColumnName)){
wQueryColumns.setValue(xColumnName, pValue, pOriginalValue);
xAchou = true;
}
if (this.pvSetLocalDataModelValue(xColumnName, pValue)){
xAchou = true;
}
if (!xAchou
&& getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.setValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
}
@Override
public DBSMessage getMessage(String pColumnName) {
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
//Retorna valor do DataModel se existir
if (wDataModel != null){
return null;
}
//Retorna valor a parti do controle local das colunas de comando
if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.getColumn(pColumnName).getMessage();
//Retorna valor a parti do controle local das colunas de pesquisa
}else if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.getColumn(pColumnName).getMessage();
}
if (getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
return null;
}
/**
* Retorna valor da coluna diretamente do ResultDataModel.<br/>
* Isto é utilizado, durante a atualização de um dataTable,
* os dados do registro atual somente poderam ser recuperado por este método.
* @param pColumnName
* @return
*/
@SuppressWarnings("unchecked")
public final <A> A getListValue(String pColumnName){
if (wResultDataModel != null
&& wResultDataModel.getRowIndex() != -1){ //Incluido em 19/mar/2015 - Ricardo
String xColumnName = pvGetColumnName(pColumnName);
return (A) wResultDataModel.getRowData().get(xColumnName);
}else{
return null;
}
}
/**
* Retorna valor da coluna informada diretamente do ResultDataModel convertida para a classe do tipo informado.
* @param pColumnName Nome da coluna
* @param pValueClass Classe para a qual será convertido o valor recebido
* @return
*/
public final <A> A getListValue(String pColumnName, Class<A> pValueClass){
return DBSObject.<A>toClass(getListValue(pColumnName), pValueClass);
}
/**
* Seta o valor da coluna informada diretamenteo no ResultDataModel.
* @param pColumnName Nome da coluna
* @param pValue Valor da coluna
* @return
*/
public final void setListValue(String pColumnName, Object pValue){
if (wResultDataModel != null){
String xColumnName = pvGetColumnName(pColumnName);
wResultDataModel.getRowData().put(xColumnName, pValue);
}
}
/**
* Retorna o comando que será executado no INSERT, UPDATE, DELETE, SELECT
* @param pCommand Comando que será executado
* @return Retorna a String contendo o comando SQL que será executado
* @throws DBSIOException
*/
public final String getSQLExecuteCommand(DBSDAO.COMMAND pCommand) throws DBSIOException{
String xStr = "";
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
xStr = DBSIO.getDAOSQLCommand(this, pCommand);
return xStr;
}
/**
* Use este atributo para popular páginas pelos ManagedBeans.
* Retorna os registro de forma que possam ser entendidos nas páginas xhtml por EL.
* As colunas poderão ser acessadas diretamente como atributos de uma classe.<br/>
* Os nomes dos atributos são os próprios nomes definidos as colunas do select.<br/>
* exemplo de código xhtlm "#{table.campo}"
* @return
*/
public final DBSResultDataModel getResultDataModel(){
return wResultDataModel;
}
/**
* Verifica se coluna existe conforme o nome informado
* @param pColumnName
* @return true = Existe / false = não existe
*/
public final boolean containsColumn(String pColumnName){
if (pColumnName==null){return false;}
String xColumnName = pvGetColumnName(pColumnName);
if (wCommandColumns.containsKey(xColumnName)){
return true;
}else if (wQueryColumns.containsKey(xColumnName)){
return true;
}
return false;
}
/**
* Executa a query informada em setQuerySQL() ou a executada anteriormente, caso exista.<br/>
* Caso deseje somente atualizar os dados da query, utilize o método <b>refresh()<b>.<br/>
* O cursor é posicionado no primeiro registro, sendo necessário executar moveBeforeFirst quando se deseja utilizar while(dao.movenext)
* @throws DBSIOException
*/
@Override
public final boolean open() throws DBSIOException{
return this.open(wQuerySQL, getUK());
}
/**
* Executa a query informada em <b>pQuerySQL</b>.
* O cursor é posicionado no primeiro registro, sendo necessário executar moveBeforeFirst quando se deseja utilizar while(dao.movenext)
* @param pQuerySQL Select SQL que será utlizada para efetuar a pesquisa
* @throws DBSIOException
*/
@Override
public final boolean open(String pQuerySQL) throws DBSIOException{
return this.open(pQuerySQL,"");
}
/**
* Executa a query informada em <b>pQuerySQL</b>.
* O cursor é posicionado no primeiro registro, sendo necessário executar moveBeforeFirst quando se deseja utilizar while(dao.movenext)
* @param pQuerySQL Query padrão SQL ANSI que será utlizada para efetuar a pesquisa
* @param pUK String com os nomes das columnas(separadas por virgula) que será utilizadas como UK dos registros,
* em substituição a PK da tabela, caso exista ou tenha sido informada no construtor do DAO.
* @return true = Sem erro / false = erro / null = exception
* @throws DBSIOException
*/
public final synchronized boolean open(String pQuerySQL, String pUK) throws DBSIOException{
//System.out.println("DBSDO - open INICIO-----------------------------");
if (wConnection == null){
wLogger.error("DBSDAO:open: Conexão não informada.");
return false;
}
if (DBSObject.isEmpty(pQuerySQL)){
wResultDataModel = null;
wQueryResultSetMetaData = null;
return false;
}
//Se não for uma pesquisa com o comando 'SELECT', ignora a chamada
if (DBSString.getInStr(pQuerySQL, "SELECT ",false)==0){
return false;
}
wQuerySQLUK = pQuerySQL.trim();
wQuerySQL = wQuerySQLUK;
if (DBSObject.isEmpty(pUK)){ //Configura a UK dos registros se o conteúdo não for vazio
//Configura a UK como sendo a PK tabela, caso a pesquisa seja de uma única tabela
if (DBSString.getStringCount(wQuerySQLUK, "Select", false) <= 1){
pvSetUK(wPK);
}else{
//Sendo uma pesquisa de mais de uma tabela, não utliza a PK como UK, passando a ser necessário que
//o usuário informe a UK caso queira identificar a posição do registro
pvSetUK("");
}
}else{
//Utiliza a UK informada;
pvSetUK(pUK);
}
//Se foi definido as columas de UK da pesquisa. Cria uma coluna de conterá a UK que será utilizada para identificar a linha
if (wUKs.length>0){
wQuerySQLUK = DBSIO.changeAsteriskFromQuerySQL(wQuerySQLUK);
if (DBSString.getInStr(wQuerySQLUK, " DISTINCT ", false) >0){
//Altera o(s) SELECT DISTINCT
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "SELECT DISTINCT", "SELECT DISTINCT " + this.pvGetUKConcatenaded() + " AS " + UKName + ", ", false);
}else{
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "SELECT ", "SELECT " + this.pvGetUKConcatenaded() + " AS " + UKName + ", ", false);
if (DBSString.getInStr(wQuerySQLUK, " UNION ", false) >0){
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "UNION SELECT ", "UNION SELECT " + this.pvGetUKConcatenaded() + " AS " + UKName + ", ", false);
}
}
//Inclui coluna DBSUK no GROUP BY se houver
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "GROUP BY ", "GROUP BY " + this.pvGetUKConcatenaded() + ", ", false);
}
//Atualiza a pesquisa
refresh();
return true;
}
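/*
 * Illustrative only (query and column names are hypothetical): when the query joins more than one
 * table, pass the columns that identify a unique row explicitly as the UK, e.g.:
 *
 *   xDAO.open("SELECT u.ID, u.NOME, p.NOME FROM USUARIO u, PERFIL p WHERE p.ID = u.PERFIL_ID", "u.ID");
 */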
/**
* Le os registros da tabela de comando(CommandTable) utilizando como filtro os valores das colunas definidas como chaves.<br/>
* Se não houver definição das colunas que são chaves, serão lidos todos os registros.<br/>
* A definição das colunas que são chaves é efetuada manualmente no construtor do DAO ou é recuperada automaticamente
* diretamente da definição da tabela no banco de dados.
* Também é possível indicar se a coluna é chave através do atributo <b>getCommandColumn("coluna").setPK(true).<b/>
* O parametro <b>pAdditionalSQLWhereCondition</b> é um filtro adicional.<br/>
* Este método é similar ao <b>open<b/>, porém a query SQL é criada automaticamente.
* Posiciona no primeiro registro lido de houver.<br/>
* Se não houver registro, retorna false.
* @param pAdditionalSQLWhereCondition Texto da condição(sem 'WHERE') a ser adicionada a cláusula 'WHERE' já gerada automaticamente. <br/>
* @return false se não encontrar nenhum registro
* @throws SQLException
*/
public synchronized boolean openCommandTable(String pAdditionalSQLWhereCondition) throws DBSIOException{
if (wCommandColumns.size() == 0){
wLogger.error("DBSDAO:executeUpdate: Não foram encontradas colunas alteradas para efetuar o comando de UPDATE.");
return false;
}
if (this.wConnection!=null){
String xSQLCommand = DBSIO.getDAOSQLCommand(this, COMMAND.SELECT, pAdditionalSQLWhereCondition);
open(xSQLCommand);
if (getRowsCount() > 0){
return true;
}
}
return false;
}
/**
* Le os registros da tabela de comando(CommandTable) utilizando como filtro os valores das colunas definidas como chaves.<br/>
* Se não houver definição das colunas que são chaves, serão lidos todos os registros.<br/>
* A definição das colunas que são chaves é efetuada manualmente no construtor do DAO ou é recuperada automaticamente
* diretamente da definição da tabela no banco de dados.
* Também é possível indicar se a coluna é chave através do atributo <b>getCommandColumn("coluna").setPK(true).<b/>
* Este método é similar ao <b>open<b/>, porém a query SQL é criada automaticamente.
* Posiciona no primeiro registro lido de houver.<br/>
* Se não houver registro, retorna false.
* @return false se não encontrar nenhum registro
* @throws SQLException
*/
public synchronized boolean openCommandTable() throws DBSIOException{
return openCommandTable("");
}
/**
* Fecha o DAO.
* É importante efetuar o close, caso se deseje utilizar o mesmo DAO para efetuar outras acessos.<br/>
* Um novo open em um DAO já fechado, irá executar a mesma query já informada anteriormente.
* @throws DBSIOException
*/
@Override
public final synchronized void close() throws DBSIOException{
if (pvFireEventBeforeClose()){
//Limpa lista de colunas do query para forçar a recriação em caso de novo open
wQueryColumns.clear();
wResultDataModel = null;
pvFireEventAfterClose(true);
}
}
/**
* Popula o resultset com os registros atuais e cria lista.
* @return true = Sem erro; false = Com erro
* @throws SQLException
*/
@SuppressWarnings("unchecked")
public final synchronized boolean refresh() throws DBSIOException{
//Executa a Select para recuperar os registros
if (pvFireEventBeforeOpen()){
ResultSet xSelectResultSet;
//Reset dos componentes
wCurrentRowIndex = -1;
wResultDataModel = null;
wQueryResultSetMetaData = null;
//-----------------
xSelectResultSet = DBSIO.openResultSet(this.getConnection(),wQuerySQLUK);
//wResultDataModel é necessário para consulta com html pois possibilita o acesso as colunas do registro
Result xResult = ResultSupport.toResult(xSelectResultSet);
wResultDataModel = new DBSResultDataModel(xResult.getRows());
xResult = null;
//Configura listener local para acomponhar seleção de registro
wResultDataModel.addDataModelListener(wDataModelListener);
try{
wQueryResultSetMetaData = xSelectResultSet.getMetaData();
pvCreateSelectColumns(wQueryResultSetMetaData);
//Chame evento
pvFireEventAfterOpen(true);
//Caso não exista o primeiro registro, move para posição inicial onde não há registro válido...
if (!moveFirstRow()) {
moveBeforeFirstRow();
}
setRowsCountAfterRefresh(getRowsCount());
return true;
}catch(SQLException e){
wLogger.error(e);
//Chame evento
pvFireEventAfterOpen(false);
DBSIO.throwIOException("refreshResultSet:" + wQuerySQLUK, e, wConnection);
return false;
}finally{
DBSIO.closeResultSet(xSelectResultSet);
}
}else{
//Chame evento
pvFireEventAfterOpen(false);
return false;
}
}
/**
* Atualizar os valores correntes com os dados da posição corrente do ResultDataModel.<br/>
* @throws DBSIOException
*/
public void synchronize() throws DBSIOException{
if (getResultDataModel() != null){
pvCopyValueFromResultDataModel(getResultDataModel().getRowIndex());
}
}
/**
* Retorna se está no primeiro registro
* @return
*/
public boolean getIsFist(){
if (wResultDataModel != null){
return wCurrentRowIndex==0;
}else{
return false;
}
}
/**
* Retorna se está no último registro
* @return
*/
public boolean getIsLast(){
if (wResultDataModel != null){
return wCurrentRowIndex == (wResultDataModel.getRowCount() - 1);
}else{
return false;
}
}
//#########################################################################################################
//## Public Methods #
//#########################################################################################################
@Override
public synchronized void moveBeforeFirstRow() throws DBSIOException{
pvMove(MOVE_DIRECTION.BEFORE_FIRST);
}
@Override
public synchronized boolean moveFirstRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.FIRST);
}
@Override
public synchronized boolean movePreviousRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.PREVIOUS);
}
@Override
public synchronized boolean moveNextRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.NEXT);
}
@Override
public synchronized boolean moveLastRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.LAST);
}
/**
* Executa o insert da tabela definida como CommandTable.<br/>
* Consulte o atributo <b>executeOnlyChangedValues</b> para outras considerações relacionadas ao insert.
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
@Override
public synchronized int executeInsert() throws DBSIOException{
if (!pvCheckColumnSize("executeInsert")){
return 0;
}
int xCount = 0;
if (this.wConnection!=null){
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
if (pvFireEventBeforeInsert()){
xCount = DBSIO.executeDAOCommand(this, DBSDAO.COMMAND.INSERT, wAutoIncrementPK);
pvFireEventAfterInsert(true);
return xCount;
}
}
pvFireEventAfterInsert(false);
return xCount;
}
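	/* Exemplo ilustrativo de inclusão (esboço, nomes hipotéticos): assume uma conexão "xConnection"
	 * e uma tabela fictícia "CLIENTE" com a coluna "NOME".
	 *
	 *   DBSDAO<Object> xDAO = new DBSDAO<Object>(xConnection, "CLIENTE");
	 *   xDAO.setValue("NOME", "Maria");
	 *   int xLinhas = xDAO.executeInsert();         //Retorna a quantidade de linhas inseridas
	 */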
/**
* Exclui e retorna a quantidade de registros excluidos.<br/>
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
@Override
public synchronized final int executeDelete() throws DBSIOException{
if (!pvCheckColumnSize("executeDelete")){
return 0;
}
int xCount = 0;
if (this.wConnection!=null){
if(pvFireEventBeforeDelete()){
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
xCount = DBSIO.executeDAOCommand(this, DBSDAO.COMMAND.DELETE);
pvFireEventAfterDelete(true);
return xCount;
}
}
pvFireEventAfterDelete(false);
return xCount;
}
/**
* Atualiza registro.<br/>
* Consulte o atributo <b>executeOnlyChangedValues</b> para outras considerações relacionadas ao update.<br/>
* Colunas definidas como PK não serão atualizadas.
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
@Override
public synchronized int executeUpdate() throws DBSIOException{
return executeUpdate("");
}
/**
* Atualiza registros
* @param pAdditionalSQLWhereCondition Texto da condição(sem 'WHERE') a ser adicionada a cláusula 'WHERE' que já será gerada automaticamente. <br/>
* Colunas definidas como PK não serão atualizadas.
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
public synchronized int executeUpdate(String pAdditionalSQLWhereCondition) throws DBSIOException{
if (!pvCheckColumnSize("executeUpdate")){
return 0;
}
int xCount = 0;
if (this.wConnection!=null){
//Chama evento
if (pvFireEventBeforeUpdate()){
//Copia os valores do wDataModel para os valores das colunas que efetuará o comando
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
//Executa o update
xCount = DBSIO.executeDAOCommand(this, COMMAND.UPDATE, pAdditionalSQLWhereCondition);
pvFireEventAfterUpdate(true);
return xCount;
}
}
pvFireEventAfterUpdate(false);
return 0;
}
@Override
/**
* Efetua o <b>UPDATE</b> considerando a PK, e caso não tenha sido encontrado registro algum, efetua o <b>INSERT</b>.<br/>
* Isto otimiza a utilização do espaço do banco de dados em comparação um <b>DELETE</b> seguido de <b>INSERT</b>.<br/>
* Porém aumenta o tempo de processamento, já que será efetuada a tentativa de <b>UPDATE</b> antes do <b>INSERT</b>.<br/>
* Colunas definidas como PK não serão atualizadas.
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
public synchronized final int executeMerge() throws DBSIOException{
return executeMerge("");
}
/**
* Efetua o <b>UPDATE</b> considerando a PK, e caso não tenha sido encontrado registro algum, efetua o <b>INSERT</b>.<br/>
* Isto otimiza a utilização do espaço do banco de dados em comparação um <b>DELETE</b> seguido de <b>INSERT</b>.<br/>
* Porém aumenta o tempo de processamento, já que será efetuada a tentativa de <b>UPDATE</b> antes do <b>INSERT</b>.<br/>
* Colunas definidas como PK não serão atualizadas.
* @param pAdditionalSQLWhereCondition Texto da condição(sem 'WHERE') a ser adicionada a cláusula 'WHERE' que já será gerada automaticamente no caso do <b>UPDATE</b>.<br/>
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
public synchronized final int executeMerge(String pAdditionalSQLWhereCondition) throws DBSIOException{
if (!pvCheckColumnSize("executeMerge")){
return 0;
}
int xN=-1;
if (this.wConnection!=null){
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
if (pvFireEventBeforeMerge()){
//Savepoint xS = DBSIO.beginTrans(this.getConnection(), "EXECUTEMERGE"); //Cria savepoint interno para retornar em caso de erro já que o update pode funcionar mais o insert não
try{
setMerging(true);
xN = executeUpdate(pAdditionalSQLWhereCondition);//Atualiza registro, se existir
					if (xN==0){ //Se não foi atualizado registro algum...
xN = executeInsert(); //Insere novo registro
}
					if (xN<=0){ //Se nenhum registro foi alterado é porque houve erro
//DBSIO.endTrans(this.getConnection(),false,xS); //ignora Update ou Insert em caso de erro. Rollback até EXECUTEMERGE
}
}catch(DBSIOException e){
throw e;
}finally{
setMerging(false);
}
pvFireEventAfterMerge(true);
return xN;
}
}
pvFireEventAfterMerge(false);
return xN;
}
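	/* Exemplo ilustrativo de merge (esboço, nomes hipotéticos): assume uma conexão "xConnection"
	 * e uma tabela fictícia "CLIENTE" com PK "ID".
	 *
	 *   DBSDAO<Object> xDAO = new DBSDAO<Object>(xConnection, "CLIENTE");
	 *   xDAO.setValue("ID", 10);
	 *   xDAO.setValue("NOME", "Maria");
	 *   int xLinhas = xDAO.executeMerge();          //Tenta o UPDATE pela PK; se nenhuma linha for afetada, efetua o INSERT
	 */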
/**
* Força que os valores atuais sejam os valores lidos originalmente
*/
public final void restoreValuesOriginal(){
wQueryColumns.restoreValuesOriginal();
wCommandColumns.restoreValuesOriginal();
}
/**
	 * Força que os valores atuais sejam o default
*/
public final void restoreValuesDefault(){
wQueryColumns.restoreValuesDefault();
wCommandColumns.restoreValuesDefault();
}
/**
* Força que o valor original seja igual ao atual
*/
public final void copyValueToValueOriginal(){
wQueryColumns.copyValueToValueOriginal();
wCommandColumns.copyValueToValueOriginal();
}
/**
	 * Seta o registro corrente como tendo os valores iguais aos do índice informado e
	 * prepara todas as colunas para serem enviadas no próximo <b>insert</b> ou <b>update</b>.<br/>
* Após este comando e antes do respectivo <b>insert</b> ou <b>update</b>, é possível alterar os valores pontualmente,
* <b>principalmente setar com nulo os campos que são <b>pk</b>, no caso de um insert</b>
* @param pRowIndex
* @throws DBSIOException
*/
public final void paste(int pRowIndex) throws DBSIOException{
setCurrentRowIndex(pRowIndex);
wQueryColumns.setChanged();
wCommandColumns.setChanged();
}
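	/* Exemplo ilustrativo de cópia de registro com paste (esboço, nomes hipotéticos),
	 * assumindo um DAO já aberto via open() e PK "ID" com auto-incremento:
	 *
	 *   xDAO.paste(2);                              //Copia os valores do registro de índice 2 e marca as colunas como alteradas
	 *   xDAO.setValue("ID", null);                  //Anula a PK para que um novo valor seja gerado
	 *   xDAO.executeInsert();                       //Insere o novo registro com os demais valores copiados
	 */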
/**
	 * Prepara todas as colunas para serem enviadas no próximo <b>insert</b> ou <b>update</b>,
* considerando os valores do registro atual.<br/>
* Após este comando e antes do respectivo <b>insert</b> ou <b>update</b>, é possível alterar os valores pontualmente,
* <b>principalmente setar com nulo os campos que são <b>pk</b>, no caso de um insert</b>
* @throws DBSIOException
*/
public final void paste() throws DBSIOException{
setCurrentRowIndex(getCurrentRowIndex());
wQueryColumns.setChanged();
wCommandColumns.setChanged();
}
/**
	 * Insere linha em branco no resultDataModel do DAO.<br/>
	 * A linha é criada somente na memória.
* @throws DBSIOException
*/
public final void insertEmptyRow() throws DBSIOException{
DBSIO.insertEmptyRow(this);
}
/**
* Cria em memória as colunas da tabela que poderão sofrer inclusão ou alteração.
* @throws SQLException
*/
private void pvCreateCommandColumns() throws DBSIOException{
ResultSet xMetaData = null;
try{
if (!DBSObject.isEmpty(wCommandTableName)){
String xColumnName;
boolean xEmpty = true;
//Excluir todas as colunas caso existam
wCommandColumns.clear();
				//TODO implementar controle de versão
//this.wHasVersionControl = false; //reseta a informação se há controle de versão durante o comando update
xMetaData = DBSIO.getTableColumnsMetaData(this.getConnection(), wCommandTableName);
//Inclui todas as colunas da tabela de comando no controle de colunas local
while (DBSIO.moveNext(xMetaData)){
xEmpty = false;
xColumnName = xMetaData.getString("COLUMN_NAME").toUpperCase().trim();
					//Verifica se é uma coluna a ser ignorada e se o nome da coluna já existe na lista.
if (!DBSIO.isColumnsIgnored(xColumnName)
&& !wCommandColumns.containsKey(xColumnName)){
wCommandColumns.MergeColumn(xColumnName,
DBSIO.toDataType(this.getConnection(), xMetaData.getInt("DATA_TYPE"), xMetaData.getInt("COLUMN_SIZE")),
xMetaData.getInt("COLUMN_SIZE"),
xMetaData.getObject("COLUMN_DEF"));
//TODO Controle de versão
// if (xMetaData.getString("COLUMN_NAME").toUpperCase().trim().equals(DBSSDK.IO.VERSION_COLUMN_NAME)){
// this.wHasVersionControl = true; //seta informando que existe controle de versão do update
// }
						//Força a coluna como sendo PK conforme a informação passada pelo usuário
if (pvIsPK(xColumnName)){
if (wCommandColumns.containsKey(xColumnName)){
wCommandColumns.getColumn(xColumnName).setPK(true);
}else{
//Se não encontrou a coluna informada, considera que a tabela não possui auto-incremento.
setAutoIncrementPK(false);
}
}
}
}
if (xEmpty){
wLogger.error("Não foi encontrada a tabela " + wCommandTableName + ". Verifique o acesso, o nome e questões de letra maiúscula/minúscula.");
}
//Se a PK não foi definida, busca definição na própria tabela
if (wPKs.length==0){
//Configura quais as colunas são PK
List<String> xPKs = DBSIO.getPrimaryKeys(this.getConnection(), wCommandTableName);
String xPK = "";
if (xPKs != null){
for (int x=0; x<=xPKs.size()-1;x++){
wCommandColumns.getColumn(xPKs.get(x)).setPK(true);
							//Define a lista de colunas(wPK) que são PK para serem utilizadas como UK caso o usuário não informe. Isso é importante para identificar a linha única no dataTable
if (!xPK.equals("")){
xPK += ",";
//Se houver mais de uma coluna como PK, considera que a tabela não possui auto-incremento.
setAutoIncrementPK(false);
}
xPK += wCommandColumns.getColumn(xPKs.get(x)).getColumnName();
}
}
pvSetPK(xPK);
}
//Se a PK foi definida e possui mais de uma coluna ou não possui pk, considera que a tabela não possui auto-incremento.
if (wPKs.length != 1) {
this.setAutoIncrementPK(false);
}
}
}catch (SQLException e){
DBSIO.throwIOException(wCommandTableName, e, wConnection);
}finally{
DBSIO.closeResultSet(xMetaData);
}
}
/**
	 * Cria, em memória, as colunas a partir das colunas informadas na consulta(wQuerySQL),
* para posteriormente serem utilizadas para verificar sua existência.
* @throws SQLException
*/
private void pvCreateSelectColumns(ResultSetMetaData pResultSetMetaData) throws DBSIOException{
try{
wQueryColumns.clear(); //Excluir todas as colunas caso existam
			//Inclui todas as colunas da pesquisa no controle de colunas local
DBSColumn xColumn = null;
String xColumnName = "";
for (int x=1; x<=pResultSetMetaData.getColumnCount();x++){
xColumnName = pResultSetMetaData.getColumnLabel(x).toUpperCase().trim();
wQueryColumns.MergeColumn(xColumnName,
DBSIO.toDataType(this.getConnection(), pResultSetMetaData.getColumnType(x), pResultSetMetaData.getPrecision(x)),
pResultSetMetaData.getColumnDisplaySize(x),
null);
xColumn = wQueryColumns.getColumn(xColumnName);
//Seta o título padrão do cabeçalho com o mesmo nome da coluna no select(considerando o AS)
xColumn.setDisplayColumnName(DBSString.toProper(xColumnName));
//Seta o tamanho do campo conforme o tamanho da coluna na tabela
xColumn.setDisplaySize(pResultSetMetaData.getColumnDisplaySize(x));
//System.out.println(pResultSetMetaData.getColumnLabel(x).toUpperCase().trim() + ":" + pResultSetMetaData.isAutoIncrement(x));
//Seta coluna como PK caso faça parte da lista de colunas definida na UK informada pelo usuário
if (pvIsUK(xColumnName)){
xColumn.setPK(true);
}
xColumn.setAutoIncrement(pResultSetMetaData.isAutoIncrement(x));
//Seta coluna UK criada
if (xColumnName.equals(UKName)){
wQueryColumns.getColumn(UKName).setDisplayColumn(false);
}
}
}catch(SQLException e){
DBSIO.throwIOException(e, wConnection);
}
}
/**
* Salva os valores do ResultSet nas variáveis locais e ativa indicador que existe um novo registro corrente.
* @throws DBSIOException
*/
private void pvSetRowPositionChanged() throws DBSIOException{
super.setRowPositionChanged(true);
}
/**
* Salva os valores do ResultSet nas variáveis locais e dispara os eventos beforeRead e afterRead.
* @throws DBSIOException
*/
private void pvCopyValueFromResultDataModel(int pRowIndex) throws DBSIOException{
// if (getResultDataModel()==null){
// return;
// }
// getResultDataModel().setRowIndex(pRowIndex);
if (pRowIndex != wCurrentRowIndex){
wCurrentRowIndex = pRowIndex;
pvSetRowPositionChanged();
//Atualiza valores atuais e reseta valor original
pvFireEventBeforeRead();
if (!wResultDataModel.isRowAvailable()){
restoreValuesDefault();
}else{
for (DBSColumn xColumn: wQueryColumns.getColumns()){
this.setValue(xColumn.getColumnName(), pvGetResultDataModelValueConvertedToDataType(xColumn.getColumnName(), xColumn.getDataType()), true);
}
}
pvFireEventAfterRead(true);
}
}
//*****************************************************************************************************
// PRIVATE
//*****************************************************************************************************
/**
* Retorna valor do resultset convertido para o tipo informado em <b>pDataType</b>
* @param pColumnName
* @param pDataType
* @return
* @throws DBSIOException
*/
@SuppressWarnings("unchecked")
private <A> A pvGetResultDataModelValueConvertedToDataType(String pColumnName, DATATYPE pDataType) throws DBSIOException{
return (A) DBSIO.toDataTypeValue(pDataType, wResultDataModel.getRowData().get(pColumnName)); //TODO
}
private boolean pvMove(MOVE_DIRECTION pDirection) throws DBSIOException {
boolean xB = false;
if (wConnection!=null){
			//Força para que o rowindex do resultset seja o mesmo que foi utilizado para armazenar os dados
			//Se for para chamar o evento
xB = pvFireEventBeforeMove();
int xRowIndex = 0;
if (xB){
if (pDirection == MOVE_DIRECTION.BEFORE_FIRST){
restoreValuesDefault();
pvCreateDataModel();
}
xRowIndex = DBSIO.getIndexAfterMove(getCurrentRowIndex(), getRowsCount(), pDirection);
if (DBSIO.getIndexAfterMoveIsOk(getCurrentRowIndex(), xRowIndex, getRowsCount(), pDirection)){
xB = setCurrentRowIndex(xRowIndex);
}else{
xB = false;
}
pvFireEventAfterMove(xB);
}
}
return xB;
}
/**
* @return Retorna as colunas utilizadas como UK, já concatenadas.
*/
private String pvGetUKConcatenaded(){
String xS = DBSString.changeStr(wUK, " ", "");
return DBSString.changeStr(xS, ",", " || ");
}
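	/* Exemplo ilustrativo (valores hipotéticos): se wUK = "USU.ID, USU.TIPO",
	 * o retorno será "USU.ID || USU.TIPO", concatenação usada como coluna única (UKName) no SELECT. */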
/**
* Configura a chave que identificará a PK da tabela que sofrerá a edição.<br/>
* Caso a chave seja composta por mais de uma coluna, as colunas deverão estar separadas por vírgula.
	 * @param pPK
*/
private void pvSetPK(String pPK){
this.wPK = pvCreatePKString(pPK, this.wCommandTableName); //Incluido nome da tabela na formação da chave /23/07/2013
this.wPKs = pvCreatePKArray(pPK);
}
/**
* Configura a chave que identificará a UK dos registros, podendo ser composta por colunas de mais de uma tabela ou <b>alias</b>.<br/>
* Caso a chave seja composta por mais de uma coluna, as colunas deverão estar separadas por vírgula.
* @param pUK
*/
private void pvSetUK(String pUK){
this.wUK = pvCreatePKString(pUK, null);
this.wUKs = pvCreatePKArray(pUK);
}
/**
* Cria String da PK a partir da PK informada, padronizando o conteúdo.
* @param pPK
* @return
*/
private String pvCreatePKString(String pPK, String pTableName){
String xTableAlias = pTableName;
if (!DBSObject.isEmpty(wQuerySQL) && !DBSObject.isEmpty(xTableAlias)){
//Retorna o nome da tabela ou alias se existir.
xTableAlias = DBSIO.getTableFromQuery(wQuerySQL, true, xTableAlias);
}
if (DBSObject.isEmpty(pPK)){
return "";
}
String xPK = pPK.trim().toUpperCase();
String[] xPKs;
if (pTableName == null){
pTableName = "";
}else{
pTableName = xTableAlias.trim().toUpperCase() + ".";
}
xPK = DBSString.changeStr(xPK, ",", " ");
//Exclui o nome da tabela da chave, caso tenha sido incluida pelo usuário, já que será obrigatóriamente adicionada no código abaixo
xPK = DBSString.changeStr(xPK, pTableName, "");
xPKs = xPK.split("\\s+");
xPK = "";
for (String xP: xPKs){
if (!xPK.equals("")){
xPK = xPK + ",";
}
xPK = xPK + pTableName + xP.trim();
}
if (xPK.equals("")){
return xPK;
}else{
return xPK.trim();
}
}
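	/* Exemplo ilustrativo (valores hipotéticos): para pPK = "id, tipo" e pTableName = "USUARIO",
	 * assumindo que o alias retornado da query seja o próprio nome da tabela,
	 * o resultado será "USUARIO.ID,USUARIO.TIPO"; com pTableName nulo, o resultado seria "ID,TIPO". */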
/**
* Retorna array das chaves(PK/UK) desconsiderando o nome da tabela, se houver.
* @param pPK
* @return
*/
private static String[] pvCreatePKArray(String pPK){
if (pPK == null ||
pPK.equals("")){
return new String[]{};
}
String[] xPKs;
int xN;
xPKs = pPK.split(",");
for (int xI = 0; xI < xPKs.length; xI++){
xPKs[xI] = xPKs[xI].trim();
xN = xPKs[xI].lastIndexOf(".");
if (xN != -1){
xPKs[xI] = xPKs[xI].substring(xN+1);
}
}
return xPKs;
}
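	/* Exemplo ilustrativo (valores hipotéticos): para pPK = "USUARIO.ID,USUARIO.TIPO",
	 * o retorno será o array {"ID", "TIPO"}, sem o prefixo da tabela. */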
/**
* Retorna se coluna informada é uma UK.
* @param pColumnName
* @return
*/
private boolean pvIsUK(String pColumnName){
pColumnName = pColumnName.trim().toUpperCase();
return (DBSString.findStringInArray(wUKs, pColumnName) > -1);
}
/**
* Retorna se coluna informada é uma PK.
* @param pColumnName
* @return
*/
private boolean pvIsPK(String pColumnName){
pColumnName = pColumnName.trim().toUpperCase();
return (DBSString.findStringInArray(wPKs, pColumnName) > -1);
}
/**
	 * Retorna o nome da coluna em caixa alta, com trim e sem o nome da tabela, se houver.
* @param pColumnName
* @return
*/
private String pvGetColumnName(String pColumnName){
if (pColumnName == null){
return "";
}
pColumnName = pColumnName.toUpperCase().trim();
int xI = pColumnName.indexOf(".");
if (xI > 0){
return pColumnName.substring(xI + 1);
}else{
return pColumnName;
}
}
@SuppressWarnings("unchecked")
@Override
protected final synchronized <A> List<A> pvGetList(boolean pReturnListDataModel) throws DBSIOException {
//Executa a Select para recuperar os registros
if (pReturnListDataModel &&
wDataModelClass == null){
return null;
}
List<DBSRow> xListRow = new ArrayList<DBSRow>();
List<DataModelClass> xListDataModel = new ArrayList<DataModelClass>();
//Cria nova linha
DBSRow xColumns = null;
//objeto com base no DataModel
DataModelClass xDataModel = null;
//Popula o list com todos os registros do resultset
if (pReturnListDataModel){
xListDataModel.clear();
}else{
xListRow.clear();
}
//Loop de todos os registros do resulset
moveBeforeFirstRow();
while (moveNextRow()){
if (pReturnListDataModel){
//Cria novo objeto com base no DataModel
xDataModel = this.createDataModel();
}else{
//Cria nova linha
xColumns = new DBSRow();
}
			//Recupera o conteúdo de todas as colunas do registro corrente
for (int x=1; x< getColumns().size(); x++){
if (!pReturnListDataModel){
xColumns.MergeColumn(getColumn(x).getColumnName(),
getColumn(x).getDataType(),
getColumn(x).getDisplaySize(),
null);
//Copia o valor para a coluna da linha
xColumns.setValue(getColumn(x).getColumnName(), pvGetResultDataModelValueConvertedToDataType(getColumn(x).getColumnName(), getColumn(x).getDataType()));
}
//Adiciona a coluna a linha
if (xDataModel!=null){
//Copia o valor para o respectivo atributo no DataModel(Se houver)
pvSetDataModelValue(xDataModel, getColumn(x).getColumnName(), pvGetResultDataModelValueConvertedToDataType(getColumn(x).getColumnName(), getColumn(x).getDataType()));
}
}
//Adiciona linha como DataModel
if (pReturnListDataModel){
xListDataModel.add(xDataModel);
}else{
//Adiciona linha como DBSRow
xListRow.add(xColumns);
}
}
if (!moveFirstRow()) {
moveBeforeFirstRow();
}
if (pReturnListDataModel){
return (List<A>) xListDataModel;
}else{
//Adiciona linha como DBSRow
return (List<A>) xListRow;
}
}
/**
* Verifica se foi definida a tabela para edição e respectivas colunas
* @param pMethodName
* @return
*/
private boolean pvCheckColumnSize(String pMethodName){
if (wCommandColumns.size() == 0){
if (DBSObject.isEmpty(getCommandTableName())){
wLogger.error("DBSDAO:" + pMethodName + ": Não foi informada a tabela que sofrerá a edição.");
}else{
wLogger.error("DBSDAO:" + pMethodName + ": Não foram encontradas colunas para efetuar a edição.[" + getCommandTableName() + "]");
}
return false;
}
return true;
}
//=========================================================================================================
//Overrides
//=========================================================================================================
@Override
public void beforeOpen(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterOpen(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeInsert(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterInsert(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeRead(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterRead(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeUpdate(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterUpdate(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeMerge(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterMerge(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeDelete(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterDelete(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeMove(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterMove(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void beforeClose(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
@Override
public void afterClose(DBSDAOEvent pEvent) {
// Manter vazio. Quem extender esta classe fica responsável de sobreescrever este métodos, caso precise
}
}
|
src/main/java/br/com/dbsoft/io/DBSDAO.java
|
package br.com.dbsoft.io;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.faces.context.FacesContext;
import javax.faces.event.PhaseId;
import javax.faces.model.DataModelEvent;
import javax.faces.model.DataModelListener;
import javax.servlet.jsp.jstl.sql.Result;
import javax.servlet.jsp.jstl.sql.ResultSupport;
import br.com.dbsoft.annotation.DBSTableModel;
import br.com.dbsoft.core.DBSSDK.IO.DATATYPE;
import br.com.dbsoft.error.DBSIOException;
import br.com.dbsoft.message.DBSMessage;
import br.com.dbsoft.util.DBSIO;
import br.com.dbsoft.util.DBSIO.MOVE_DIRECTION;
import br.com.dbsoft.util.DBSObject;
import br.com.dbsoft.util.DBSString;
/**
* @param <DataModelClass> Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.<br/>
* É necessário também passar esta classe no construtor.
*/
/**
* @author ricardo.villar
*
* @param <DataModelClass>
*/
public class DBSDAO<DataModelClass> extends DBSDAOBase<DataModelClass> {
private static final long serialVersionUID = -3273102169619413848L;
public static enum COMMAND{
INSERT,
UPDATE,
DELETE,
SELECT,
LOCK;
}
//#########################################################################################################
//## Working Variables #
//#########################################################################################################
private Connection wConnection;
private String wQuerySQL = "";
//private ResultSet wSelectResultSet;
private String wUK = ""; //Colunas da pesquisa, separada por virgula, que será usadas para retornar ao mesmo registro em caso de refresh
private String wPK = "";
private String[] wPKs = new String[]{};
private String[] wUKs = new String[]{};
private ResultSetMetaData wQueryResultSetMetaData;
private String wQuerySQLUK=""; //Query SQL alterada a partir da wsQuerySql, para conter a informação necessários para o controle dos registros
private DBSRow wQueryColumns = new DBSRow(); //Colunas existente na pesquisa
private DBSRow wCommandColumns = new DBSRow(); //Colunas que sofreráo modificação de dados
private String wCommandTableName = ""; //Nome da tabela que sofrerá modificação de dados
private boolean wAutoIncrementPK = true; //Efetua o insert e recuperar o valores do campos com autoincrement;
private DBSResultDataModel wResultDataModel;
private int wCurrentRowIndex = -1;
private boolean wIsMerging = false;
private DataModelListener wDataModelListener = new DataModelListener(){
@Override
public void rowSelected(DataModelEvent pEvent) {
if (FacesContext.getCurrentInstance() == null){return;}
PhaseId xId = FacesContext.getCurrentInstance().getCurrentPhaseId();
// System.out.println("PahseId:" + xId.toString());
// System.out.println("PahseId:" + getQuerySQL());
// System.out.println("PahseId:" + FacesContext.getCurrentInstance().getPartialViewContext().isAjaxRequest() + ":" + xId.toString() + "[" + wCurrentRowIndex + ":" + pEvent.getRowIndex() + "]");
// if ((!xId.equals(PhaseId.RENDER_RESPONSE) && !xId.equals(PhaseId.INVOKE_APPLICATION)) || pEvent.getRowIndex() != -1){
// }
if (xId == null
// || (!xId.equals(PhaseId.RENDER_RESPONSE) && !xId.equals(PhaseId.INVOKE_APPLICATION)) || pEvent.getRowIndex() != -1) {
|| (xId.equals(PhaseId.INVOKE_APPLICATION) && pEvent.getRowIndex() != -1)){
try {
pvCopyValueFromResultDataModel(pEvent.getRowIndex());
} catch (DBSIOException e) {
wLogger.error(e);
}
}
// if (xId == null
// || ((xId.equals(PhaseId.RENDER_RESPONSE) || xId.equals(PhaseId.INVOKE_APPLICATION)) && pEvent.getRowIndex() != -1)){
// try {
// synchronize();
// } catch (DBSIOException e) {
// wLogger.error(e);
// }
// }
// if (xId == null
// || (pEvent.getRowIndex() != -1 &&
// ((xId.equals(PhaseId.INVOKE_APPLICATION) && pEvent.getRowIndex() != -1) || xId.equals(PhaseId.RENDER_RESPONSE)))){
// setCurrentRowIndex(pEvent.getRowIndex());
// }else{
// System.out.println("stop");
// }
}
};
//private boolean wHasVersionControl=false;
//#########################################################################################################
//## Public Properties #
//#########################################################################################################
/**
* Cria novo DAO.
* @param pConnection Conexão com o banco de dados
*/
public DBSDAO(Connection pConnection) {
this.setConnection(pConnection);
}
/**
* Cria novo DAO.
	 * @param pDataModelClass Classe Model da tabela do banco de dados ou classe com atributos homônimos às colunas com as quais se deseje trabalhar no DAO.<br/>
* @param pConnection
* @throws DBSIOException
*/
public DBSDAO(Class<DataModelClass> pDataModelClass, Connection pConnection) throws DBSIOException {
super(pDataModelClass);
this.setConnection(pConnection);
//Recupera nome da tabela, através da anotação @DataModel, caso exista na classe informada
DBSTableModel xAnnotation = DBSIO.getAnnotationDataModel(wDataModel);
if (xAnnotation!=null){
this.setCommandTableName(xAnnotation.tablename());
}
}
/**
* Cria novo DAO.
* @param pConnection Conexão com o banco de dados
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* O nome deverá ser o nome exato que está no banco de dados.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.<br/>
	 * Caso também esteja sendo executada uma <i>query</i> via <b>open</b>, esta tabela <b>não</b> poderá ter um <i>alias</i>.
* @throws SQLException
*/
public DBSDAO(Connection pConnection, String pCommandTableName) throws DBSIOException{
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName);
}
/**
* Cria novo DAO.
* @param pDataModelClass Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.
* @param pConnection Conexão com o banco de dados.
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.
* @throws SQLException
*/
public DBSDAO(Class<DataModelClass> pDataModelClass, Connection pConnection, String pCommandTableName) throws DBSIOException{
super(pDataModelClass);
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName);
}
/**
* Cria novo DAO.
* @param pConnection Conexão com o banco de dados.
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.
* @param pPK Nomes das colunas que serão utilizadas para identificar se é um registro único separadas por virgula(,).<br/>
* Esta informação precisa ser passada caso as colunas que são PK não estejam configuradas no banco de dados como tal.
* @throws SQLException
*/
public DBSDAO(Connection pConnection, String pCommandTableName, String pPK) throws DBSIOException{
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName, pPK);
}
/**
* Cria novo DAO.
* @param pDataModelClass Classe Model da tabela do banco de dados ou classe com atributos homônimos as colunas com as quais se deseje trabalhar no DAO.
* @param pConnection Conexão com o banco de dados.
* @param pCommandTableName Nome da tabela que sofrerá Insert/Update/Delete.<br/>
* Certifique-se que não há problema de letra maiúscula ou minúscula para encontrar a tabela no banco.
* @param pPK Nomes das colunas que serão utilizadas para identificar se é um registro único, separadas por virgula(,).
* Esta informação precisa ser passada caso as colunas que são PK não estejam configuradas no banco de dados como tal.
* @throws SQLException
*/
public DBSDAO(Class<DataModelClass> pDataModelClass, Connection pConnection, String pCommandTableName, String pPK) throws DBSIOException{
super(pDataModelClass);
this.setConnection(pConnection);
this.setCommandTableName(pCommandTableName, pPK);
}
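	/* Exemplos ilustrativos de construção (esboço, nomes hipotéticos): "xConnection" é uma java.sql.Connection
	 * válida e "CLIENTE"/"ID" são fictícios.
	 *
	 *   DBSDAO<Object> xDAO1 = new DBSDAO<Object>(xConnection, "CLIENTE");       //PKs recuperadas da definição da tabela
	 *   DBSDAO<Object> xDAO2 = new DBSDAO<Object>(xConnection, "CLIENTE", "ID"); //PK informada manualmente
	 */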
/**
	 * Configura a Conexão.
* @param pConnection Conexão com o banco de dados
*/
public final void setConnection(Connection pConnection) {
if (!DBSObject.isEmpty(pConnection)){
this.wConnection = pConnection;
}
}
/**
	 * Retorna a Conexão.
* @return Conexão com o banco
*/
public final Connection getConnection() {
return wConnection;
}
/**
* Retorna comando SQL utilizado para efetuar a pesquisa.
* @return Comando Sql utilizado
*/
public final String getQuerySQL() {
return wQuerySQL;
}
/**
* Configura o comando SQL utilizado para pesquisa.<br/>
* @param pQuerySQL
*/
public final void setQuerySQL(String pQuerySQL) {
wQuerySQL = pQuerySQL;
}
/**
* @return Quantidade de colunas da pesquisa.
* @throws SQLException
*/
public final int getQueryColumnsCount() throws SQLException{
return wQueryResultSetMetaData.getColumnCount();
}
/**
* Retorna o total de linhas da Query no caso de ter efetuado uma pesquisa via <b>open</b>.<br/>
* Retorna o total de linhas da tabela no caso de <b>não</b> ter efetuado uma pesquisa via <b>open</b>, mas ter definido a <b>CommandTableName</b>.
* @return Quantidade.
* @throws DBSIOException
*/
@Override
public final int getRowsCount() throws DBSIOException{
if (wResultDataModel == null){
if (DBSObject.isEmpty(this.wQuerySQL)
&& !DBSObject.isEmpty(this.wCommandTableName)){
return DBSIO.getTableRowsCount(this.wConnection, this.wCommandTableName);
}
return 0;
}else{
return wResultDataModel.getRowCount();
}
// Comentado em 3/dez/2013 - Aparentemente, código abaixo era desnecessário. - Ricardo
// try {
// if (!DBSObject.isEmpty(this.wQuerySQL)){
// return DBSIO.getSQLRowsCount(this.wConnection, this.wQuerySQL);
// }else if (!DBSObject.isEmpty(this.wCommandTableName)){
// return DBSIO.getTableRowsCount(this.wConnection, this.wCommandTableName);
// }else{
// return 0;
// }
// } catch (DBSIOException e) {
// wLogger.error("getRowsCount", e);
// return 0;
// }
}
/**
	 * Retorna o índice do registro corrente.
* Caso não haja registro, retorna -1.
* @return
*/
public final int getCurrentRowIndex(){
if (getResultDataModel()==null){
wCurrentRowIndex = -1;
}
return wCurrentRowIndex; //getResultDataModel().getRowIndex();
}
/**
	 * Seta o registro corrente a partir do índice informado.<br/>
	 * Caso o índice seja maior que o existente, posiciona no último.<br/>
* Caso não existam registros, posiciona no 'anterior ao primeiro'(-1).
* @param pRowIndex
* @throws DBSIOException
*/
public final boolean setCurrentRowIndex(int pRowIndex) throws DBSIOException{
if (getResultDataModel()==null){
return false;
}
boolean xOk = true;
		//Posiciona no último registro caso o valor informado seja maior que a quantidade de registros existentes
if ((pRowIndex + 1) > getResultDataModel().getRowCount()){
pRowIndex = getResultDataModel().getRowCount() - 1;
xOk = false;
}
//Posiciona no registro anterior ao primeiro caso o valor informado seja inferior a -1
if (pRowIndex < -1){
pRowIndex = - 1;
xOk = false;
}
		//Indica qual é o registro corrente a partir do índice informado.
		//Este comando dispara automaticamente o evento rowSelected, que por sua vez chama pvSetRowPositionChanged,
		//onde os valores são recuperados do resultset e copiados para as variáveis locais
if (pRowIndex != getResultDataModel().getRowIndex()){
getResultDataModel().setRowIndex(pRowIndex);
}
pvCopyValueFromResultDataModel(pRowIndex);
return xOk;
}
/**
* Retorna se o registro atual é um novo registro.<br/>
* Os dados deste registro existem somente em memória, sendo necessário implementar a rotina para salva-los.
* @return
*/
public final boolean getIsNewRow(){
if (wResultDataModel == null){
return false;
}
if (wResultDataModel.getRowIndex() > (getRowsCountAfterRefresh() - 1)){
return true;
}
return false;
}
public boolean isMerging() {
return wIsMerging;
}
public void setMerging(boolean pMerging) {
wIsMerging = pMerging;
}
/**
* Nome da coluna na tabela ou alias(as) atribuido no select.
	 * @param pColumnIndex Número da coluna que se deseja saber o nome
* @return Nome da coluna
* @throws SQLException
*/
public final String getQueryColumnName(Integer pColumnIndex) throws SQLException{
return wQueryResultSetMetaData.getColumnName(pColumnIndex);
}
/**
* Retorna todas as colunas da query.
* @return
*/
@Override
public final Collection<DBSColumn> getColumns() {
return wQueryColumns.getColumns();
}
/**
* Retorna coluna a partir do nome informado.<br/>
* Caso a coluna não exista na query,
* será pesquisado também na tabela principal que sofrerá a edição(se houver).
* @param pColumnName
* @return
*/
@Override
public final DBSColumn getColumn(String pColumnName) {
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.getColumn(xColumnName);
}else if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.getColumn(xColumnName);
}
wLogger.error("getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "][" + wCommandTableName + "]");
return null;
}
/**
	 * Retorna a coluna a partir do número informado
* @return
*/
@Override
public final DBSColumn getColumn(int pColumnIndex) {
return wQueryColumns.getColumn(pColumnIndex);
}
/**
* @return Todas as colunas da tabela que sofrerá modificação.
*/
public final Collection<DBSColumn> getCommandColumns() {
return wCommandColumns.getColumns();
}
/**
* Retorna coluna da tabela.
* @param pColumnName Nome da coluna que se deseja os dados
* @return Coluna ou null se não for encontrada
*/
public final DBSColumn getCommandColumn(String pColumnName){
if (pColumnName==null){return null;}
pColumnName = pvGetColumnName(pColumnName);
return wCommandColumns.getColumn(pvGetColumnName(pColumnName));
}
/**
	 * @return Nome da tabela que sofrerá a modificação
*/
public final String getCommandTableName() {
return wCommandTableName;
}
/**
* Configura o nome da tabela que sofrerá as modificações(<b>INSERT, UPDATE, DELETE</b>).<br/>
* Não é necessário efetuar uma <i>query</i> via <i>Open</i> para efetuar as modificações.
	 * No entanto, os dados recuperados pela query serão aproveitados para efeito das modificações,
* facilitando, por exemplo, um <b>Update</b> no registro corrente.
* @param pCommandTableName Nome da tabela.
* @throws SQLException
*/
public final void setCommandTableName(String pCommandTableName) throws DBSIOException{
setCommandTableName(pCommandTableName, "");
}
/**
* Configura o nome da tabela que sofrerá as modificações.<br/>
* Não é necessário efetuar uma <i>query</i> via <i>Open</i> para efetuar as modificações.
	 * No entanto, os dados recuperados pela query serão aproveitados para efeito das modificações,
* facilitando, por exemplo, um <b>Update</b> no registro corrente.
* @param pCommandTableName Nome da tabela que sofrerá a modificação
	 * @param pPK Nomes das colunas que vão representar a chave primária, caso queira forçar ou definir uma PK, mesmo que não exista na tabela.
* @throws DBSIOException
*/
public final void setCommandTableName(String pCommandTableName, String pPK) throws DBSIOException{
if (!DBSObject.isEmpty(pCommandTableName) && //Se nome não for vazio
!wCommandTableName.equals(pCommandTableName.trim())){//Se nome da table for diferente da anterior
if (wQueryColumns.size() > 0){
wLogger.error("DBSDAO: CommandTableName deve ser configurada ANTES de efetuar o Open() ou no momento da criação da nova instância do DAO.");
}
this.wCommandTableName = pCommandTableName.trim();
pvSetPK(pPK);
pvCreateCommandColumns();
}
}
/**
	 * Retorna os nomes das colunas que identificam a chave primária dos registros
	 * da tabela principal que sofrerá a edição, conforme definição da <b>commandTableName</b>.<br/>
* No caso de haver mais de uma coluna como PK, os nomes das colunas serão separados por vírgula.
* @return
*/
public final String getPK(){
return wPK;
}
/**
* Retorna uma string contendo os nomes das colunas que formam o UK que será responsável
	 * por identificar uma linha única, podendo haver colunas de mais de uma tabela ou <b>alias</b>.<br/>
* No caso de haver mais de uma coluna como UK, os nomes das colunas serão separados por vírgula.
* @return
*/
@Override
public final String getUK(){
return wUK;
}
/**
	 * Retorna o valor da UK assumindo que há somente uma coluna.<br/>
* Coluna pode ser um <b>alias</b> de mais de uma coluna.
* @return
*/
@Override
public final Object getUKValue(){
return this.getValue(UKName);
}
/**
* Indica se a coluna que é PK é de auto-incremento. O padrão é TRUE.
* Se a tabela possuir mais de uma coluna como PK, o padrão passa a ser FALSE.
* @return
*/
public final boolean isAutoIncrementPK() {
return wAutoIncrementPK;
}
/**
* Indica se a coluna que é PK é de auto-incremento.
* @param pAutoIncrementPK
*/
public final void setAutoIncrementPK(boolean pAutoIncrementPK) {
this.wAutoIncrementPK = pAutoIncrementPK;
}
/**
* Retorna o valor da coluna antes de alteração
* @param pColumnName
* @return
*/
public final <A> A getValueOriginal(String pColumnName){
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.<A>getValueOriginal(xColumnName);
}else if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.<A>getValueOriginal(xColumnName);
}
if (getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
return null;
}
// public static <A> A getValueX(String pColumnName){
// Double x = 0D;
// return (A) x;
// }
//
/**
* Retorna valor da coluna
* @param pColumnName Nome da coluna
* @return Valor
*/
@Override
public final <A> A getValue(String pColumnName){
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
//Retorna valor do DataModel se existir
if (wDataModel != null){
A xValue = pvGetLocalDataModelValue(xColumnName);
//Retorna valor se não for nulo.
if (xValue != null){
return xValue;
}else{
//Verifica se o campo existe no dataModel. Se não existir, tentará encontrar a coluna no wCommandColumn ou wSelectColumn abaixo
Field xField = DBSIO.getDataModelField(wDataModel, xColumnName);
if (xField!=null){
return null;
}
}
}
		//Retorna valor a partir do controle local das colunas de comando
if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.<A>getValue(xColumnName);
		//Retorna valor a partir do controle local das colunas de pesquisa
}else if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.<A>getValue(xColumnName);
}
if (getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
return null;
}
/**
* Retorna o valor da coluna convertida para a classe do tipo informado
* @param pColumnName Nome da coluna
* @param pValueClass Classe para a qual será convertido o valor recebido
* @return
*/
public final <A> A getValue(String pColumnName, Class<A> pValueClass){
return DBSObject.<A>toClass(getValue(pColumnName), pValueClass);
}
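	/* Exemplo ilustrativo (esboço, coluna hipotética "VALOR"):
	 *
	 *   Double xValor = xDAO.getValue("VALOR", Double.class); //Converte o valor retornado para a classe informada
	 */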
@Override
public final void setValue(String pColumnName, Object pValue){
this.setValue(pColumnName, pValue, false);
}
@Override
public final void setValue(String pColumnName, Object pValue, boolean pOriginalValue){
if (pColumnName==null){return;}
boolean xAchou = false;
String xColumnName = pvGetColumnName(pColumnName);
//Seta o valor na coluna da tabela que poderá sofrer alteração
if (wCommandColumns.containsKey(xColumnName)){
wCommandColumns.setValue(xColumnName, pValue, pOriginalValue);
xAchou = true;
}
		//Seta o valor na coluna do select, independentemente de ser uma coluna da tabela de comando(CommandTableName)
if (wQueryColumns.containsKey(xColumnName)){
wQueryColumns.setValue(xColumnName, pValue, pOriginalValue);
xAchou = true;
}
if (this.pvSetLocalDataModelValue(xColumnName, pValue)){
xAchou = true;
}
if (!xAchou
&& getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.setValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
}
@Override
public DBSMessage getMessage(String pColumnName) {
if (pColumnName==null){return null;}
String xColumnName = pvGetColumnName(pColumnName);
//Retorna valor do DataModel se existir
if (wDataModel != null){
return null;
}
		//Retorna valor a partir do controle local das colunas de comando
if (wCommandColumns.containsKey(xColumnName)){
return wCommandColumns.getColumn(pColumnName).getMessage();
		//Retorna valor a partir do controle local das colunas de pesquisa
}else if (wQueryColumns.containsKey(xColumnName)){
return wQueryColumns.getColumn(pColumnName).getMessage();
}
if (getShowColumnNameNotFoundMessage()){
wLogger.error("DBSDAO.getValue:Coluna não encontrada.[" + pColumnName + "][" + wQuerySQL + "]");
}
return null;
}
/**
* Retorna valor da coluna diretamente do ResultDataModel.<br/>
	 * Isto é utilizado durante a atualização de um dataTable, quando
	 * os dados do registro atual somente poderão ser recuperados por este método.
* @param pColumnName
* @return
*/
@SuppressWarnings("unchecked")
public final <A> A getListValue(String pColumnName){
if (wResultDataModel != null
&& wResultDataModel.getRowIndex() != -1){ //Incluido em 19/mar/2015 - Ricardo
String xColumnName = pvGetColumnName(pColumnName);
return (A) wResultDataModel.getRowData().get(xColumnName);
}else{
return null;
}
}
/**
* Retorna valor da coluna informada diretamente do ResultDataModel convertida para a classe do tipo informado.
* @param pColumnName Nome da coluna
* @param pValueClass Classe para a qual será convertido o valor recebido
* @return
*/
public final <A> A getListValue(String pColumnName, Class<A> pValueClass){
return DBSObject.<A>toClass(getListValue(pColumnName), pValueClass);
}
/**
	 * Seta o valor da coluna informada diretamente no ResultDataModel.
* @param pColumnName Nome da coluna
* @param pValue Valor da coluna
* @return
*/
public final void setListValue(String pColumnName, Object pValue){
if (wResultDataModel != null){
String xColumnName = pvGetColumnName(pColumnName);
wResultDataModel.getRowData().put(xColumnName, pValue);
}
}
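	/* Exemplo ilustrativo (esboço, coluna hipotética "NOME") de acesso direto à linha corrente do ResultDataModel,
	 * útil durante a renderização de um dataTable:
	 *
	 *   String xNome = xDAO.getListValue("NOME");
	 *   xDAO.setListValue("NOME", xNome.trim());
	 */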
/**
* Retorna o comando que será executado no INSERT, UPDATE, DELETE, SELECT
* @param pCommand Comando que será executado
* @return Retorna a String contendo o comando SQL que será executado
* @throws DBSIOException
*/
public final String getSQLExecuteCommand(DBSDAO.COMMAND pCommand) throws DBSIOException{
String xStr = "";
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
xStr = DBSIO.getDAOSQLCommand(this, pCommand);
return xStr;
}
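	/* Exemplo ilustrativo (esboço) para inspecionar o SQL gerado sem executá-lo:
	 *
	 *   String xSQL = xDAO.getSQLExecuteCommand(DBSDAO.COMMAND.UPDATE);
	 */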
/**
* Use este atributo para popular páginas pelos ManagedBeans.
	 * Retorna os registros de forma que possam ser acessados nas páginas xhtml por EL.
	 * As colunas poderão ser acessadas diretamente como atributos de uma classe.<br/>
	 * Os nomes dos atributos são os próprios nomes definidos para as colunas do select.<br/>
	 * Exemplo de código xhtml: "#{table.campo}"
* @return
*/
public final DBSResultDataModel getResultDataModel(){
return wResultDataModel;
}
/**
* Verifica se coluna existe conforme o nome informado
* @param pColumnName
* @return true = Existe / false = não existe
*/
public final boolean containsColumn(String pColumnName){
if (pColumnName==null){return false;}
String xColumnName = pvGetColumnName(pColumnName);
if (wCommandColumns.containsKey(xColumnName)){
return true;
}else if (wQueryColumns.containsKey(xColumnName)){
return true;
}
return false;
}
/**
* Executa a query informada em setQuerySQL() ou a executada anteriormente, caso exista.<br/>
	 * Caso deseje somente atualizar os dados da query, utilize o método <b>refresh()</b>.<br/>
	 * O cursor é posicionado no primeiro registro, sendo necessário executar moveBeforeFirstRow() quando se deseja utilizar while(dao.moveNextRow())
* @throws DBSIOException
*/
@Override
public final boolean open() throws DBSIOException{
return this.open(wQuerySQL, getUK());
}
/**
* Executa a query informada em <b>pQuerySQL</b>.
	 * O cursor é posicionado no primeiro registro, sendo necessário executar moveBeforeFirstRow() quando se deseja utilizar while(dao.moveNextRow())
	 * @param pQuerySQL Select SQL que será utilizada para efetuar a pesquisa
* @throws DBSIOException
*/
@Override
public final boolean open(String pQuerySQL) throws DBSIOException{
return this.open(pQuerySQL,"");
}
/**
* Executa a query informada em <b>pQuerySQL</b>.
	 * O cursor é posicionado no primeiro registro, sendo necessário executar moveBeforeFirstRow() quando se deseja utilizar while(dao.moveNextRow())
	 * @param pQuerySQL Query padrão SQL ANSI que será utilizada para efetuar a pesquisa
	 * @param pUK String com os nomes das colunas(separadas por vírgula) que serão utilizadas como UK dos registros,
* em substituição a PK da tabela, caso exista ou tenha sido informada no construtor do DAO.
* @return true = Sem erro / false = erro / null = exception
* @throws DBSIOException
*/
public final synchronized boolean open(String pQuerySQL, String pUK) throws DBSIOException{
//System.out.println("DBSDO - open INICIO-----------------------------");
if (wConnection == null){
wLogger.error("DBSDAO:open: Conexão não informada.");
return false;
}
if (DBSObject.isEmpty(pQuerySQL)){
wResultDataModel = null;
wQueryResultSetMetaData = null;
return false;
}
//Se não for uma pesquisa com o comando 'SELECT', ignora a chamada
if (DBSString.getInStr(pQuerySQL, "SELECT ",false)==0){
return false;
}
wQuerySQLUK = pQuerySQL.trim();
wQuerySQL = wQuerySQLUK;
if (DBSObject.isEmpty(pUK)){ //Configura a UK dos registros se o conteúdo não for vazio
			//Configura a UK como sendo a PK da tabela, caso a pesquisa seja de uma única tabela
if (DBSString.getStringCount(wQuerySQLUK, "Select", false) <= 1){
pvSetUK(wPK);
}else{
				//Sendo uma pesquisa de mais de uma tabela, não utiliza a PK como UK, passando a ser necessário que
//o usuário informe a UK caso queira identificar a posição do registro
pvSetUK("");
}
}else{
//Utiliza a UK informada;
pvSetUK(pUK);
}
		//Se foram definidas as colunas de UK da pesquisa, cria uma coluna que conterá a UK utilizada para identificar a linha
if (wUKs.length>0){
wQuerySQLUK = DBSIO.changeAsteriskFromQuerySQL(wQuerySQLUK);
if (DBSString.getInStr(wQuerySQLUK, " DISTINCT ", false) >0){
//Altera o(s) SELECT DISTINCT
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "SELECT DISTINCT", "SELECT DISTINCT " + this.pvGetUKConcatenaded() + " AS " + UKName + ", ", false);
}else{
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "SELECT ", "SELECT " + this.pvGetUKConcatenaded() + " AS " + UKName + ", ", false);
if (DBSString.getInStr(wQuerySQLUK, " UNION ", false) >0){
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "UNION SELECT ", "UNION SELECT " + this.pvGetUKConcatenaded() + " AS " + UKName + ", ", false);
}
}
//Inclui coluna DBSUK no GROUP BY se houver
wQuerySQLUK = DBSString.changeStr(wQuerySQLUK, "GROUP BY ", "GROUP BY " + this.pvGetUKConcatenaded() + ", ", false);
}
//Atualiza a pesquisa
refresh();
return true;
}
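	/* Exemplo ilustrativo de pesquisa e navegação (esboço, query e colunas hipotéticas):
	 *
	 *   if (xDAO.open("SELECT ID, NOME FROM CLIENTE")){
	 *       xDAO.moveBeforeFirstRow();              //Necessário para iterar com moveNextRow()
	 *       while (xDAO.moveNextRow()){
	 *           String xNome = xDAO.getValue("NOME");
	 *       }
	 *   }
	 */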
/**
* Le os registros da tabela de comando(CommandTable) utilizando como filtro os valores das colunas definidas como chaves.<br/>
* Se não houver definição das colunas que são chaves, serão lidos todos os registros.<br/>
* A definição das colunas que são chaves é efetuada manualmente no construtor do DAO ou é recuperada automaticamente
* diretamente da definição da tabela no banco de dados.
	 * Também é possível indicar se a coluna é chave através do atributo <b>getCommandColumn("coluna").setPK(true)</b>.
	 * O parâmetro <b>pAdditionalSQLWhereCondition</b> é um filtro adicional.<br/>
	 * Este método é similar ao <b>open</b>, porém a query SQL é criada automaticamente.
	 * Posiciona no primeiro registro lido, se houver.<br/>
* Se não houver registro, retorna false.
* @param pAdditionalSQLWhereCondition Texto da condição(sem 'WHERE') a ser adicionada a cláusula 'WHERE' já gerada automaticamente. <br/>
* @return false se não encontrar nenhum registro
* @throws SQLException
*/
public synchronized boolean openCommandTable(String pAdditionalSQLWhereCondition) throws DBSIOException{
if (wCommandColumns.size() == 0){
wLogger.error("DBSDAO:executeUpdate: Não foram encontradas colunas alteradas para efetuar o comando de UPDATE.");
return false;
}
if (this.wConnection!=null){
String xSQLCommand = DBSIO.getDAOSQLCommand(this, COMMAND.SELECT, pAdditionalSQLWhereCondition);
open(xSQLCommand);
if (getRowsCount() > 0){
return true;
}
}
return false;
}
/**
* Le os registros da tabela de comando(CommandTable) utilizando como filtro os valores das colunas definidas como chaves.<br/>
* Se não houver definição das colunas que são chaves, serão lidos todos os registros.<br/>
* A definição das colunas que são chaves é efetuada manualmente no construtor do DAO ou é recuperada automaticamente
* diretamente da definição da tabela no banco de dados.
	 * Também é possível indicar se a coluna é chave através do atributo <b>getCommandColumn("coluna").setPK(true)</b>.
	 * Este método é similar ao <b>open</b>, porém a query SQL é criada automaticamente.
	 * Posiciona no primeiro registro lido, se houver.<br/>
* Se não houver registro, retorna false.
* @return false se não encontrar nenhum registro
* @throws SQLException
*/
public synchronized boolean openCommandTable() throws DBSIOException{
return openCommandTable("");
}
/**
* Fecha o DAO.
	 * É importante efetuar o close, caso se deseje utilizar o mesmo DAO para efetuar outros acessos.<br/>
* Um novo open em um DAO já fechado, irá executar a mesma query já informada anteriormente.
* @throws DBSIOException
*/
@Override
public final synchronized void close() throws DBSIOException{
if (pvFireEventBeforeClose()){
//Limpa lista de colunas do query para forçar a recriação em caso de novo open
wQueryColumns.clear();
wResultDataModel = null;
pvFireEventAfterClose(true);
}
}
/**
* Popula o resultset com os registros atuais e cria lista.
* @return true = Sem erro; false = Com erro
* @throws SQLException
*/
@SuppressWarnings("unchecked")
public final synchronized boolean refresh() throws DBSIOException{
//Executa a Select para recuperar os registros
if (pvFireEventBeforeOpen()){
ResultSet xSelectResultSet;
//Reset dos componentes
wCurrentRowIndex = -1;
wResultDataModel = null;
wQueryResultSetMetaData = null;
//-----------------
xSelectResultSet = DBSIO.openResultSet(this.getConnection(),wQuerySQLUK);
//wResultDataModel é necessário para consulta com html pois possibilita o acesso as colunas do registro
Result xResult = ResultSupport.toResult(xSelectResultSet);
wResultDataModel = new DBSResultDataModel(xResult.getRows());
xResult = null;
			//Configura listener local para acompanhar a seleção de registro
wResultDataModel.addDataModelListener(wDataModelListener);
try{
wQueryResultSetMetaData = xSelectResultSet.getMetaData();
pvCreateSelectColumns(wQueryResultSetMetaData);
//Chame evento
pvFireEventAfterOpen(true);
//Caso não exista o primeiro registro, move para posição inicial onde não há registro válido...
if (!moveFirstRow()) {
moveBeforeFirstRow();
}
setRowsCountAfterRefresh(getRowsCount());
return true;
}catch(SQLException e){
wLogger.error(e);
//Chame evento
pvFireEventAfterOpen(false);
DBSIO.throwIOException("refreshResultSet:" + wQuerySQLUK, e, wConnection);
return false;
}finally{
DBSIO.closeResultSet(xSelectResultSet);
}
}else{
//Chame evento
pvFireEventAfterOpen(false);
return false;
}
}
/**
* Atualizar os valores correntes com os dados da posição corrente do ResultDataModel.<br/>
* @throws DBSIOException
*/
public void synchronize() throws DBSIOException{
if (getResultDataModel() != null){
pvCopyValueFromResultDataModel(getResultDataModel().getRowIndex());
}
}
/**
* Retorna se está no primeiro registro
* @return
*/
public boolean getIsFist(){
if (wResultDataModel != null){
return wCurrentRowIndex==0;
}else{
return false;
}
}
/**
* Retorna se está no último registro
* @return
*/
public boolean getIsLast(){
if (wResultDataModel != null){
return wCurrentRowIndex == (wResultDataModel.getRowCount() - 1);
}else{
return false;
}
}
//#########################################################################################################
//## Public Methods #
//#########################################################################################################
@Override
public synchronized void moveBeforeFirstRow() throws DBSIOException{
pvMove(MOVE_DIRECTION.BEFORE_FIRST);
}
@Override
public synchronized boolean moveFirstRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.FIRST);
}
@Override
public synchronized boolean movePreviousRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.PREVIOUS);
}
@Override
public synchronized boolean moveNextRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.NEXT);
}
@Override
public synchronized boolean moveLastRow() throws DBSIOException{
return pvMove(MOVE_DIRECTION.LAST);
}
/**
* Executa o insert da tabela definida como CommandTable.<br/>
* Consulte o atributo <b>executeOnlyChangedValues</b> para outras considerações relacionadas ao insert.
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
@Override
public synchronized int executeInsert() throws DBSIOException{
if (!pvCheckColumnSize("executeInsert")){
return 0;
}
int xCount = 0;
if (this.wConnection!=null){
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
if (pvFireEventBeforeInsert()){
xCount = DBSIO.executeDAOCommand(this, DBSDAO.COMMAND.INSERT, wAutoIncrementPK);
pvFireEventAfterInsert(true);
return xCount;
}
}
pvFireEventAfterInsert(false);
return xCount;
}
/**
* Exclui e retorna a quantidade de registros excluidos.<br/>
* @return Quantidade de linhas afetadas
* @throws SQLException
*/
@Override
public synchronized final int executeDelete() throws DBSIOException{
if (!pvCheckColumnSize("executeDelete")){
return 0;
}
int xCount = 0;
if (this.wConnection!=null){
if(pvFireEventBeforeDelete()){
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
xCount = DBSIO.executeDAOCommand(this, DBSDAO.COMMAND.DELETE);
pvFireEventAfterDelete(true);
return xCount;
}
}
pvFireEventAfterDelete(false);
return xCount;
}
/**
 * Updates the record.<br/>
 * See the <b>executeOnlyChangedValues</b> attribute for other considerations related to the update.<br/>
 * Columns defined as PK will not be updated.
 * @return Number of affected rows
* @throws SQLException
*/
@Override
public synchronized int executeUpdate() throws DBSIOException{
return executeUpdate("");
}
/**
 * Updates records.
 * @param pAdditionalSQLWhereCondition Condition text (without 'WHERE') to be appended to the 'WHERE' clause that is generated automatically. <br/>
 * Columns defined as PK will not be updated.
 * @return Number of affected rows
* @throws SQLException
*/
public synchronized int executeUpdate(String pAdditionalSQLWhereCondition) throws DBSIOException{
if (!pvCheckColumnSize("executeUpdate")){
return 0;
}
int xCount = 0;
if (this.wConnection!=null){
//Fire event
if (pvFireEventBeforeUpdate()){
//Copy the values from wDataModel into the column values used by the command
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
//Execute the update
xCount = DBSIO.executeDAOCommand(this, COMMAND.UPDATE, pAdditionalSQLWhereCondition);
pvFireEventAfterUpdate(true);
return xCount;
}
}
pvFireEventAfterUpdate(false);
return 0;
}
	/**
	 * Performs the <b>UPDATE</b> based on the PK and, if no record is found, performs the <b>INSERT</b>.<br/>
	 * This optimizes database space usage compared to a <b>DELETE</b> followed by an <b>INSERT</b>.<br/>
	 * However, it increases processing time, since the <b>UPDATE</b> is attempted before the <b>INSERT</b>.<br/>
	 * Columns defined as PK will not be updated.
	 * @return Number of affected rows
	 * @throws SQLException
	 */
	@Override
public synchronized final int executeMerge() throws DBSIOException{
return executeMerge("");
}
/**
 * Performs the <b>UPDATE</b> based on the PK and, if no record is found, performs the <b>INSERT</b>.<br/>
 * This optimizes database space usage compared to a <b>DELETE</b> followed by an <b>INSERT</b>.<br/>
 * However, it increases processing time, since the <b>UPDATE</b> is attempted before the <b>INSERT</b>.<br/>
 * Columns defined as PK will not be updated.
 * @param pAdditionalSQLWhereCondition Condition text (without 'WHERE') to be appended to the 'WHERE' clause that is generated automatically for the <b>UPDATE</b>.<br/>
 * @return Number of affected rows
* @throws SQLException
*/
public synchronized final int executeMerge(String pAdditionalSQLWhereCondition) throws DBSIOException{
if (!pvCheckColumnSize("executeMerge")){
return 0;
}
int xN=-1;
if (this.wConnection!=null){
pvCopyDataModelFieldsValueToCommandValue(wDataModel);
if (pvFireEventBeforeMerge()){
				//Savepoint xS = DBSIO.beginTrans(this.getConnection(), "EXECUTEMERGE"); //Creates an internal savepoint to roll back to on error, since the update may succeed while the insert fails
				try{
					setMerging(true);
					xN = executeUpdate(pAdditionalSQLWhereCondition);//Updates the record, if it exists
					if (xN==0){ //If no record was updated...
						xN = executeInsert(); //Insert a new record
					}
					if (xN<=0){ //If no record was changed, an error occurred
						//DBSIO.endTrans(this.getConnection(),false,xS); //Discards the Update or Insert on error. Rollback to EXECUTEMERGE
}
}catch(DBSIOException e){
throw e;
}finally{
setMerging(false);
}
pvFireEventAfterMerge(true);
return xN;
}
}
pvFireEventAfterMerge(false);
return xN;
}
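	/*
	 * Illustrative sketch (not part of the original source) of the merge flow implemented above,
	 * assuming a DAO whose command table and connection are already configured:
	 *
	 *   xDAO.setValue("NAME", "John");        // a two-argument setValue(String, Object) is assumed here for brevity
	 *   int xRows = xDAO.executeMerge();      // tries the UPDATE by PK first, falls back to INSERT
	 *
	 * The column name "NAME" is only an example.
	 */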
/**
 * Forces the current values to be the values originally read
*/
public final void restoreValuesOriginal(){
wQueryColumns.restoreValuesOriginal();
wCommandColumns.restoreValuesOriginal();
}
/**
 * Forces the current values to be the defaults
*/
public final void restoreValuesDefault(){
wQueryColumns.restoreValuesDefault();
wCommandColumns.restoreValuesDefault();
}
/**
 * Forces the original value to be equal to the current value
*/
public final void copyValueToValueOriginal(){
wQueryColumns.copyValueToValueOriginal();
wCommandColumns.copyValueToValueOriginal();
}
/**
 * Sets the current record to the values of the given row index and
 * prepares all columns to be sent in the next <b>insert</b> or <b>update</b>.<br/>
 * After this command and before the respective <b>insert</b> or <b>update</b>, values can still be changed individually,
 * <b>in particular setting the <b>pk</b> fields to null in the case of an insert</b>
* @param pRowIndex
* @throws DBSIOException
*/
public final void paste(int pRowIndex) throws DBSIOException{
setCurrentRowIndex(pRowIndex);
wQueryColumns.setChanged();
wCommandColumns.setChanged();
}
/**
 * Prepares all columns to be sent in the next <b>insert</b> or <b>update</b>,
 * considering the values of the current record.<br/>
 * After this command and before the respective <b>insert</b> or <b>update</b>, values can still be changed individually,
 * <b>in particular setting the <b>pk</b> fields to null in the case of an insert</b>
* @throws DBSIOException
*/
public final void paste() throws DBSIOException{
setCurrentRowIndex(getCurrentRowIndex());
wQueryColumns.setChanged();
wCommandColumns.setChanged();
}
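	/*
	 * Illustrative sketch (not part of the original source) of the duplication flow that the
	 * paste() methods enable, as described in their documentation:
	 *
	 *   xDAO.paste(xSourceRowIndex);     // stage all column values from an existing row
	 *   xDAO.setValue("ID", null, true); // clear the PK so a new one can be assigned; "ID" is just an example, and
	 *                                    // the three-argument setValue overload used elsewhere in this class is assumed
	 *   xDAO.executeInsert();            // persist the copy as a new record
	 */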
/**
 * Inserts a blank row into the DAO's resultDataModel.<br/>
 * The row is created in memory only.
* @throws DBSIOException
*/
public final void insertEmptyRow() throws DBSIOException{
DBSIO.insertEmptyRow(this);
}
/**
 * Creates in memory the table columns that may receive inserts or updates.
* @throws SQLException
*/
private void pvCreateCommandColumns() throws DBSIOException{
ResultSet xMetaData = null;
try{
if (!DBSObject.isEmpty(wCommandTableName)){
String xColumnName;
boolean xEmpty = true;
//Remove all columns if any exist
wCommandColumns.clear();
//TODO implement version control
//this.wHasVersionControl = false; //resets the flag indicating whether version control applies during the update command
xMetaData = DBSIO.getTableColumnsMetaData(this.getConnection(), wCommandTableName);
//Add all columns of the command table to the local column control
while (DBSIO.moveNext(xMetaData)){
xEmpty = false;
xColumnName = xMetaData.getString("COLUMN_NAME").toUpperCase().trim();
//Check whether the column should be ignored and whether the column name already exists in the list.
if (!DBSIO.isColumnsIgnored(xColumnName)
&& !wCommandColumns.containsKey(xColumnName)){
wCommandColumns.MergeColumn(xColumnName,
DBSIO.toDataType(this.getConnection(), xMetaData.getInt("DATA_TYPE"), xMetaData.getInt("COLUMN_SIZE")),
xMetaData.getInt("COLUMN_SIZE"),
xMetaData.getObject("COLUMN_DEF"));
//TODO Version control
//	if (xMetaData.getString("COLUMN_NAME").toUpperCase().trim().equals(DBSSDK.IO.VERSION_COLUMN_NAME)){
//		this.wHasVersionControl = true; //sets the flag indicating that update version control exists
//	}
//Force the column to be PK according to the information provided by the user
if (pvIsPK(xColumnName)){
if (wCommandColumns.containsKey(xColumnName)){
wCommandColumns.getColumn(xColumnName).setPK(true);
}else{
//If the given column was not found, assume the table has no auto-increment.
setAutoIncrementPK(false);
}
}
}
}
if (xEmpty){
wLogger.error("Não foi encontrada a tabela " + wCommandTableName + ". Verifique o acesso, o nome e questões de letra maiúscula/minúscula.");
}
//If the PK was not defined, look up the definition in the table itself
if (wPKs.length==0){
//Determine which columns are PK
List<String> xPKs = DBSIO.getPrimaryKeys(this.getConnection(), wCommandTableName);
String xPK = "";
if (xPKs != null){
for (int x=0; x<=xPKs.size()-1;x++){
wCommandColumns.getColumn(xPKs.get(x)).setPK(true);
//Define the list of columns (wPK) that are PK to be used as the UK when the user does not provide one. This is important to identify the unique row in the dataTable
if (!xPK.equals("")){
xPK += ",";
//If more than one column is PK, assume the table has no auto-increment.
setAutoIncrementPK(false);
}
xPK += wCommandColumns.getColumn(xPKs.get(x)).getColumnName();
}
}
pvSetPK(xPK);
}
//If the defined PK has more than one column, or there is no PK, assume the table has no auto-increment.
if (wPKs.length != 1) {
this.setAutoIncrementPK(false);
}
}
}catch (SQLException e){
DBSIO.throwIOException(wCommandTableName, e, wConnection);
}finally{
DBSIO.closeResultSet(xMetaData);
}
}
/**
 * Creates, in memory, the columns from the columns specified in the query (wQuerySQL),
 * to be used later to verify their existence.
* @throws SQLException
*/
private void pvCreateSelectColumns(ResultSetMetaData pResultSetMetaData) throws DBSIOException{
try{
wQueryColumns.clear(); //Remove all columns if any exist
//Add all query columns to the local column control
DBSColumn xColumn = null;
String xColumnName = "";
for (int x=1; x<=pResultSetMetaData.getColumnCount();x++){
xColumnName = pResultSetMetaData.getColumnLabel(x).toUpperCase().trim();
wQueryColumns.MergeColumn(xColumnName,
DBSIO.toDataType(this.getConnection(), pResultSetMetaData.getColumnType(x), pResultSetMetaData.getPrecision(x)),
pResultSetMetaData.getColumnDisplaySize(x),
null);
xColumn = wQueryColumns.getColumn(xColumnName);
//Set the default header title to the same column name used in the select (considering the AS)
xColumn.setDisplayColumnName(DBSString.toProper(xColumnName));
//Set the field size according to the column size in the table
xColumn.setDisplaySize(pResultSetMetaData.getColumnDisplaySize(x));
//System.out.println(pResultSetMetaData.getColumnLabel(x).toUpperCase().trim() + ":" + pResultSetMetaData.isAutoIncrement(x));
//Set the column as PK if it is part of the column list defined in the UK provided by the user
if (pvIsUK(xColumnName)){
xColumn.setPK(true);
}
xColumn.setAutoIncrement(pResultSetMetaData.isAutoIncrement(x));
//Handle the generated UK column
if (xColumnName.equals(UKName)){
wQueryColumns.getColumn(UKName).setDisplayColumn(false);
}
}
}catch(SQLException e){
DBSIO.throwIOException(e, wConnection);
}
}
/**
 * Saves the ResultSet values into local variables and sets the flag indicating a new current record.
* @throws DBSIOException
*/
private void pvSetRowPositionChanged() throws DBSIOException{
super.setRowPositionChanged(true);
}
/**
 * Saves the ResultSet values into local variables and fires the beforeRead and afterRead events.
* @throws DBSIOException
*/
private void pvCopyValueFromResultDataModel(int pRowIndex) throws DBSIOException{
// if (getResultDataModel()==null){
// return;
// }
// getResultDataModel().setRowIndex(pRowIndex);
if (pRowIndex != wCurrentRowIndex){
wCurrentRowIndex = pRowIndex;
pvSetRowPositionChanged();
//Update the current values and reset the original value
pvFireEventBeforeRead();
if (!wResultDataModel.isRowAvailable()){
restoreValuesDefault();
}else{
for (DBSColumn xColumn: wQueryColumns.getColumns()){
this.setValue(xColumn.getColumnName(), pvGetResultDataModelValueConvertedToDataType(xColumn.getColumnName(), xColumn.getDataType()), true);
}
}
pvFireEventAfterRead(true);
}
}
//*****************************************************************************************************
// PRIVATE
//*****************************************************************************************************
/**
 * Returns the resultset value converted to the type given in <b>pDataType</b>
* @param pColumnName
* @param pDataType
* @return
* @throws DBSIOException
*/
@SuppressWarnings("unchecked")
private <A> A pvGetResultDataModelValueConvertedToDataType(String pColumnName, DATATYPE pDataType) throws DBSIOException{
return (A) DBSIO.getDataTypeConvertedValue(pDataType, wResultDataModel.getRowData().get(pColumnName)); //TODO
}
private boolean pvMove(MOVE_DIRECTION pDirection) throws DBSIOException {
boolean xB = false;
if (wConnection!=null){
//Force the resultset rowindex to be the same one that was used to store the data
//If the event should be fired
xB = pvFireEventBeforeMove();
int xRowIndex = 0;
if (xB){
if (pDirection == MOVE_DIRECTION.BEFORE_FIRST){
restoreValuesDefault();
pvCreateDataModel();
}
xRowIndex = DBSIO.getIndexAfterMove(getCurrentRowIndex(), getRowsCount(), pDirection);
if (DBSIO.getIndexAfterMoveIsOk(getCurrentRowIndex(), xRowIndex, getRowsCount(), pDirection)){
xB = setCurrentRowIndex(xRowIndex);
}else{
xB = false;
}
pvFireEventAfterMove(xB);
}
}
return xB;
}
/**
 * @return Returns the columns used as UK, already concatenated.
*/
private String pvGetUKConcatenaded(){
String xS = DBSString.changeStr(wUK, " ", "");
return DBSString.changeStr(xS, ",", " || ");
}
/**
 * Sets the key that identifies the PK of the table to be edited.<br/>
 * If the key is composed of more than one column, the columns must be separated by commas.
 * @param pPK
*/
private void pvSetPK(String pPK){
this.wPK = pvCreatePKString(pPK, this.wCommandTableName); //Table name included when building the key /23/07/2013
this.wPKs = pvCreatePKArray(pPK);
}
/**
 * Sets the key that identifies the UK of the records, which may be composed of columns from more than one table or <b>alias</b>.<br/>
 * If the key is composed of more than one column, the columns must be separated by commas.
* @param pUK
*/
private void pvSetUK(String pUK){
this.wUK = pvCreatePKString(pUK, null);
this.wUKs = pvCreatePKArray(pUK);
}
/**
 * Creates the PK String from the given PK, standardizing its content.
* @param pPK
* @return
*/
private String pvCreatePKString(String pPK, String pTableName){
String xTableAlias = pTableName;
if (!DBSObject.isEmpty(wQuerySQL) && !DBSObject.isEmpty(xTableAlias)){
//Returns the table name, or the alias if one exists.
xTableAlias = DBSIO.getTableFromQuery(wQuerySQL, true, xTableAlias);
}
if (DBSObject.isEmpty(pPK)){
return "";
}
String xPK = pPK.trim().toUpperCase();
String[] xPKs;
if (pTableName == null){
pTableName = "";
}else{
pTableName = xTableAlias.trim().toUpperCase() + ".";
}
xPK = DBSString.changeStr(xPK, ",", " ");
//Remove the table name from the key if the user included it, since it is always added again by the code below
xPK = DBSString.changeStr(xPK, pTableName, "");
xPKs = xPK.split("\\s+");
xPK = "";
for (String xP: xPKs){
if (!xPK.equals("")){
xPK = xPK + ",";
}
xPK = xPK + pTableName + xP.trim();
}
if (xPK.equals("")){
return xPK;
}else{
return xPK.trim();
}
}
/**
 * Returns an array of the keys (PK/UK), ignoring the table name if present.
* @param pPK
* @return
*/
private static String[] pvCreatePKArray(String pPK){
if (pPK == null ||
pPK.equals("")){
return new String[]{};
}
String[] xPKs;
int xN;
xPKs = pPK.split(",");
for (int xI = 0; xI < xPKs.length; xI++){
xPKs[xI] = xPKs[xI].trim();
xN = xPKs[xI].lastIndexOf(".");
if (xN != -1){
xPKs[xI] = xPKs[xI].substring(xN+1);
}
}
return xPKs;
}
/**
 * Returns whether the given column is a UK.
* @param pColumnName
* @return
*/
private boolean pvIsUK(String pColumnName){
pColumnName = pColumnName.trim().toUpperCase();
return (DBSString.findStringInArray(wUKs, pColumnName) > -1);
}
/**
 * Returns whether the given column is a PK.
* @param pColumnName
* @return
*/
private boolean pvIsPK(String pColumnName){
pColumnName = pColumnName.trim().toUpperCase();
return (DBSString.findStringInArray(wPKs, pColumnName) > -1);
}
/**
 * Returns the column name upper-cased, trimmed and without the table name, if present.
* @param pColumnName
* @return
*/
private String pvGetColumnName(String pColumnName){
if (pColumnName == null){
return "";
}
pColumnName = pColumnName.toUpperCase().trim();
int xI = pColumnName.indexOf(".");
if (xI > 0){
return pColumnName.substring(xI + 1);
}else{
return pColumnName;
}
}
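	// Worked example (illustrative): pvGetColumnName(" usr.Name ") returns "NAME";
	// the name is upper-cased, trimmed and stripped of any table prefix before the dot.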
@SuppressWarnings("unchecked")
@Override
protected final synchronized <A> List<A> pvGetList(boolean pReturnListDataModel) throws DBSIOException {
//Run the SELECT to retrieve the records
if (pReturnListDataModel &&
wDataModelClass == null){
return null;
}
List<DBSRow> xListRow = new ArrayList<DBSRow>();
List<DataModelClass> xListDataModel = new ArrayList<DataModelClass>();
//Create a new row
DBSRow xColumns = null;
//object based on the DataModel
DataModelClass xDataModel = null;
//Populate the list with all resultset records
if (pReturnListDataModel){
xListDataModel.clear();
}else{
xListRow.clear();
}
//Loop over all resultset records
moveBeforeFirstRow();
while (moveNextRow()){
if (pReturnListDataModel){
//Create a new object based on the DataModel
xDataModel = this.createDataModel();
}else{
//Create a new row
xColumns = new DBSRow();
}
//Retrieve the contents of all columns of the current record
for (int x=1; x< getColumns().size(); x++){
if (!pReturnListDataModel){
xColumns.MergeColumn(getColumn(x).getColumnName(),
getColumn(x).getDataType(),
getColumn(x).getDisplaySize(),
null);
//Copy the value to the row's column
xColumns.setValue(getColumn(x).getColumnName(), pvGetResultDataModelValueConvertedToDataType(getColumn(x).getColumnName(), getColumn(x).getDataType()));
}
//Add the column to the row
if (xDataModel!=null){
//Copy the value to the corresponding attribute in the DataModel (if any)
pvSetDataModelValue(xDataModel, getColumn(x).getColumnName(), pvGetResultDataModelValueConvertedToDataType(getColumn(x).getColumnName(), getColumn(x).getDataType()));
}
}
//Add the row as a DataModel
if (pReturnListDataModel){
xListDataModel.add(xDataModel);
}else{
//Add the row as a DBSRow
xListRow.add(xColumns);
}
}
if (!moveFirstRow()) {
moveBeforeFirstRow();
}
if (pReturnListDataModel){
return (List<A>) xListDataModel;
}else{
//Return the rows as DBSRow
return (List<A>) xListRow;
}
}
/**
 * Checks whether the table to be edited and its columns have been defined
* @param pMethodName
* @return
*/
private boolean pvCheckColumnSize(String pMethodName){
if (wCommandColumns.size() == 0){
if (DBSObject.isEmpty(getCommandTableName())){
wLogger.error("DBSDAO:" + pMethodName + ": Não foi informada a tabela que sofrerá a edição.");
}else{
wLogger.error("DBSDAO:" + pMethodName + ": Não foram encontradas colunas para efetuar a edição.[" + getCommandTableName() + "]");
}
return false;
}
return true;
}
//=========================================================================================================
//Overrides
//=========================================================================================================
	@Override
	public void beforeOpen(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterOpen(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeInsert(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterInsert(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeRead(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterRead(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeUpdate(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterUpdate(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeMerge(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterMerge(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeDelete(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterDelete(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeMove(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterMove(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void beforeClose(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
	@Override
	public void afterClose(DBSDAOEvent pEvent) {
		// Keep empty. Classes that extend this class are responsible for overriding this method if needed
	}
}
|
Renamed the method getDataTypeConvertedValue to toDataTypeValue
|
src/main/java/br/com/dbsoft/io/DBSDAO.java
|
Renamed the method getDataTypeConvertedValue to toDataTypeValue
|
|
Java
|
mit
|
04bd25d7c42bd088e3b154cae7a2fac17985c7f1
| 0
|
MinecraftPortCentral/GriefPrevention
|
/*
* This file is part of GriefPrevention, licensed under the MIT License (MIT).
*
* Copyright (c) bloodmc
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package me.ryanhamshire.griefprevention.util;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.tileentity.TileEntity;
import org.spongepowered.api.entity.living.Living;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.api.event.Event;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.EventContext;
import org.spongepowered.api.event.cause.EventContextKeys;
import org.spongepowered.api.event.world.ExplosionEvent;
import org.spongepowered.common.SpongeImplHooks;
public class CauseContextHelper {
public static User getEventUser(Event event) {
final Cause cause = event.getCause();
final EventContext context = event.getContext();
// Don't attempt to set user for leaf decay
if (context.containsKey(EventContextKeys.LEAVES_DECAY)) {
return null;
}
User user = null;
User fakePlayer = null;
if (cause != null) {
user = cause.first(User.class).orElse(null);
if (user != null && user instanceof EntityPlayer && SpongeImplHooks.isFakePlayer((EntityPlayer) user)) {
fakePlayer = user;
}
}
// Only check notifier for fire spread
if (context.containsKey(EventContextKeys.FIRE_SPREAD)) {
return context.get(EventContextKeys.NOTIFIER).orElse(null);
}
if (user == null || fakePlayer != null) {
// Always use owner for ticking TE's
// See issue MinecraftPortCentral/GriefPrevention#610 for more information
if (cause.containsType(TileEntity.class)) {
user = context.get(EventContextKeys.OWNER)
.orElse(context.get(EventContextKeys.NOTIFIER)
.orElse(context.get(EventContextKeys.CREATOR)
.orElse(null)));
} else {
user = context.get(EventContextKeys.NOTIFIER)
.orElse(context.get(EventContextKeys.OWNER)
.orElse(context.get(EventContextKeys.CREATOR)
.orElse(null)));
}
}
if (user == null) {
// fall back to fakeplayer if we still don't have a user
user = fakePlayer;
if (event instanceof ExplosionEvent) {
// Check igniter
final Living living = context.get(EventContextKeys.IGNITER).orElse(null);
if (living != null && living instanceof User) {
user = (User) living;
}
}
}
return user;
}
}
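// Illustrative usage sketch (not part of the original source): within a Sponge event listener,
// the helper above resolves the accountable user from the event's cause and context, e.g.
//
//   User user = CauseContextHelper.getEventUser(event);
//   if (user == null) {
//       return; // no accountable user (for example, leaf decay)
//   }
//   // ... apply claim/permission checks for this user
//
// The surrounding listener method and the permission checks are hypothetical.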
|
src/main/java/me/ryanhamshire/griefprevention/util/CauseContextHelper.java
|
/*
* This file is part of GriefPrevention, licensed under the MIT License (MIT).
*
* Copyright (c) bloodmc
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package me.ryanhamshire.griefprevention.util;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.tileentity.TileEntity;
import org.spongepowered.api.entity.living.Living;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.api.event.Event;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.EventContext;
import org.spongepowered.api.event.cause.EventContextKeys;
import org.spongepowered.api.event.world.ExplosionEvent;
import org.spongepowered.common.SpongeImplHooks;
public class CauseContextHelper {
public static User getEventUser(Event event) {
final Cause cause = event.getCause();
final EventContext context = event.getContext();
// Don't attempt to set user for leaf decay
if (context.containsKey(EventContextKeys.LEAVES_DECAY)) {
return null;
}
User user = null;
if (cause != null) {
user = cause.first(User.class).orElse(null);
if (user != null && user instanceof EntityPlayer && SpongeImplHooks.isFakePlayer((EntityPlayer) user) && user.getName().startsWith("[")) {
user = null;
}
}
// Only check notifier for fire spread
if (context.containsKey(EventContextKeys.FIRE_SPREAD)) {
return context.get(EventContextKeys.NOTIFIER).orElse(null);
}
if (user == null) {
// Always use owner for ticking TE's
// See issue MinecraftPortCentral/GriefPrevention#610 for more information
if (cause.root() instanceof TileEntity) {
user = context.get(EventContextKeys.OWNER)
.orElse(context.get(EventContextKeys.NOTIFIER)
.orElse(context.get(EventContextKeys.CREATOR)
.orElse(null)));
} else {
user = context.get(EventContextKeys.NOTIFIER)
.orElse(context.get(EventContextKeys.OWNER)
.orElse(context.get(EventContextKeys.CREATOR)
.orElse(null)));
}
}
if (user == null) {
if (event instanceof ExplosionEvent) {
// Check igniter
final Living living = context.get(EventContextKeys.IGNITER).orElse(null);
if (living != null && living instanceof User) {
user = (User) living;
}
}
}
return user;
}
}
|
Fix TE owner check when a fakeplayer is involved. Fixes #831
|
src/main/java/me/ryanhamshire/griefprevention/util/CauseContextHelper.java
|
Fix TE owner check when a fakeplayer is involved. Fixes #831
|
|
Java
|
mit
|
cdfb95d21e88370d7778f4e027d8c9d884617df7
| 0
|
Backendless/Android-SDK,Backendless/Android-SDK,gwokudasam/Android-SDK
|
/*
* ********************************************************************************************************************
* <p/>
* BACKENDLESS.COM CONFIDENTIAL
* <p/>
* ********************************************************************************************************************
* <p/>
* Copyright 2012 BACKENDLESS.COM. All Rights Reserved.
* <p/>
* NOTICE: All information contained herein is, and remains the property of Backendless.com and its suppliers,
* if any. The intellectual and technical concepts contained herein are proprietary to Backendless.com and its
* suppliers and may be covered by U.S. and Foreign Patents, patents in process, and are protected by trade secret
* or copyright law. Dissemination of this information or reproduction of this material is strictly forbidden
* unless prior written permission is obtained from Backendless.com.
* <p/>
* ********************************************************************************************************************
*/
package com.backendless;
import com.backendless.async.callback.AsyncCallback;
import com.backendless.async.message.AsyncMessage;
import com.backendless.cache.CacheService;
import com.backendless.cache.ICache;
import com.backendless.core.ResponseCarrier;
import com.backendless.core.responder.AdaptingResponder;
import com.backendless.core.responder.policy.PoJoAdaptingPolicy;
import com.backendless.exceptions.BackendlessException;
import com.backendless.exceptions.BackendlessFault;
import com.backendless.utils.ReflectionUtil;
import weborb.client.IChainedResponder;
import weborb.types.IAdaptingType;
import weborb.util.io.ISerializer;
import java.lang.reflect.Type;
import java.util.Date;
public class Cache
{
private final static String CACHE_SERVER_ALIAS = "com.backendless.services.redis.CacheService";
private final static Cache instance = new Cache();
static Cache getInstance()
{
return instance;
}
private Cache()
{
}
public <T> ICache<T> with( String key, Class<? extends T> type )
{
return new CacheService<T>( type, key );
}
public void put( String key, Object object, int timeToLive, AsyncCallback<Object> callback )
{
byte[] bytes = serialize( object );
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "putBytes", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, bytes, timeToLive }, callback );
}
public void put( String key, Object object, AsyncCallback<Object> callback )
{
put( key, object, 0, callback );
}
public void put( String key, Object object )
{
put( key, object, 0 );
}
public void put( String key, Object object, int timeToLive )
{
byte[] bytes = serialize( object );
Invoker.invokeSync( CACHE_SERVER_ALIAS, "putBytes", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, bytes, timeToLive }, getChainedResponder() );
}
public <T> T get( String key, Class<? extends T> type )
{
byte[] bytes = Invoker.invokeSync( CACHE_SERVER_ALIAS, "getBytes", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, new AdaptingResponder<byte[]>( byte[].class, new PoJoAdaptingPolicy<byte[]>() ) );
if( bytes == null )
return null;
Class argType = type.getClass();
return (T) deserialize( bytes, argType );
}
public <T> void get( final String key, final AsyncCallback<T> callback )
{
final Type asyncCallbackType = ReflectionUtil.getCallbackGenericType( callback );
ThreadPoolService.getPoolExecutor().execute( new Runnable()
{
@Override
public void run()
{
try
{
T result = (T) get( key, (Class) asyncCallbackType );
ResponseCarrier.getInstance().deliverMessage( new AsyncMessage<T>( result, callback ) );
}
catch( BackendlessException e )
{
ResponseCarrier.getInstance().deliverMessage( new AsyncMessage<T>( new BackendlessFault( e ), callback ) );
}
}
} );
}
public Boolean contains( String key )
{
return Invoker.invokeSync( CACHE_SERVER_ALIAS, "containsKey", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, getChainedResponder() );
}
public void contains( String key, AsyncCallback<Boolean> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "containsKey", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, callback );
}
public void expireIn( String key, int seconds )
{
Invoker.invokeSync( CACHE_SERVER_ALIAS, "expireIn", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, seconds }, getChainedResponder() );
}
public void expireIn( String key, int seconds, AsyncCallback<Object> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "expireIn", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, seconds }, callback );
}
public void expireAt( String key, Date date )
{
expireAt( key, date.getTime() );
}
public void expireAt( String key, long timestamp )
{
Invoker.invokeSync( CACHE_SERVER_ALIAS, "expireAt", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, timestamp }, getChainedResponder() );
}
public void expireAt( String key, Date date, AsyncCallback<Object> callback )
{
expireAt( key, date.getTime(), callback );
}
public void expireAt( String key, long timestamp, AsyncCallback<Object> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "expireAt", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, timestamp }, callback );
}
public void delete( String key )
{
Invoker.invokeSync( CACHE_SERVER_ALIAS, "delete", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, getChainedResponder() );
}
public void delete( final String key, final AsyncCallback<Object> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "delete", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, callback );
}
private static <T> IChainedResponder getChainedResponder()
{
return new AdaptingResponder<T>();
}
private static Object deserialize( byte[] bytes, Type type )
{
Object object = null;
try
{
object = weborb.util.io.Serializer.fromBytes( bytes, ISerializer.AMF3, false );
if( object instanceof IAdaptingType )
return type == null ? ((IAdaptingType) object).defaultAdapt() : ((IAdaptingType) object).adapt( type );
}
catch( Exception e )
{
throw new BackendlessException( e );
}
return object;
}
private static byte[] serialize( Object object )
{
byte[] bytes;
try
{
bytes = weborb.util.io.Serializer.toBytes( object, ISerializer.AMF3 );
}
catch( Exception e )
{
throw new BackendlessException( e );
}
return bytes;
}
}
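// Illustrative usage sketch (not part of the original source), based only on the methods
// declared above and assuming the SDK exposes this service as Backendless.Cache:
//
//   Backendless.Cache.put("greeting", "hello", 60);                        // store with a 60-second time-to-live
//   String value = Backendless.Cache.get("greeting", String.class);        // typed read
//   Backendless.Cache.expireAt("greeting", new Date().getTime() + 60000);  // absolute expiration
//
// Note that expireAt(String, long) now passes the timestamp through unchanged; the division
// by 1000 present in the previous version was removed, as the commit message below states.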
|
src/com/backendless/Cache.java
|
/*
* ********************************************************************************************************************
* <p/>
* BACKENDLESS.COM CONFIDENTIAL
* <p/>
* ********************************************************************************************************************
* <p/>
* Copyright 2012 BACKENDLESS.COM. All Rights Reserved.
* <p/>
* NOTICE: All information contained herein is, and remains the property of Backendless.com and its suppliers,
* if any. The intellectual and technical concepts contained herein are proprietary to Backendless.com and its
* suppliers and may be covered by U.S. and Foreign Patents, patents in process, and are protected by trade secret
* or copyright law. Dissemination of this information or reproduction of this material is strictly forbidden
* unless prior written permission is obtained from Backendless.com.
* <p/>
* ********************************************************************************************************************
*/
package com.backendless;
import com.backendless.async.callback.AsyncCallback;
import com.backendless.async.message.AsyncMessage;
import com.backendless.cache.CacheService;
import com.backendless.cache.ICache;
import com.backendless.core.ResponseCarrier;
import com.backendless.core.responder.AdaptingResponder;
import com.backendless.core.responder.policy.PoJoAdaptingPolicy;
import com.backendless.exceptions.BackendlessException;
import com.backendless.exceptions.BackendlessFault;
import com.backendless.utils.ReflectionUtil;
import weborb.client.IChainedResponder;
import weborb.types.IAdaptingType;
import weborb.util.io.ISerializer;
import java.lang.reflect.Type;
import java.util.Date;
public class Cache
{
private final static String CACHE_SERVER_ALIAS = "com.backendless.services.redis.CacheService";
private final static Cache instance = new Cache();
static Cache getInstance()
{
return instance;
}
private Cache()
{
}
public <T> ICache<T> with( String key, Class<? extends T> type )
{
return new CacheService<T>( type, key );
}
public void put( String key, Object object, int timeToLive, AsyncCallback<Object> callback )
{
byte[] bytes = serialize( object );
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "putBytes", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, bytes, timeToLive }, callback );
}
public void put( String key, Object object, AsyncCallback<Object> callback )
{
put( key, object, 0, callback );
}
public void put( String key, Object object )
{
put( key, object, 0 );
}
public void put( String key, Object object, int timeToLive )
{
byte[] bytes = serialize( object );
Invoker.invokeSync( CACHE_SERVER_ALIAS, "putBytes", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, bytes, timeToLive }, getChainedResponder() );
}
public <T> T get( String key, Class<? extends T> type )
{
byte[] bytes = Invoker.invokeSync( CACHE_SERVER_ALIAS, "getBytes", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, new AdaptingResponder<byte[]>( byte[].class, new PoJoAdaptingPolicy<byte[]>() ) );
if( bytes == null )
return null;
Class argType = type.getClass();
return (T) deserialize( bytes, argType );
}
public <T> void get( final String key, final AsyncCallback<T> callback )
{
final Type asyncCallbackType = ReflectionUtil.getCallbackGenericType( callback );
ThreadPoolService.getPoolExecutor().execute( new Runnable()
{
@Override
public void run()
{
try
{
T result = (T) get( key, (Class) asyncCallbackType );
ResponseCarrier.getInstance().deliverMessage( new AsyncMessage<T>( result, callback ) );
}
catch( BackendlessException e )
{
ResponseCarrier.getInstance().deliverMessage( new AsyncMessage<T>( new BackendlessFault( e ), callback ) );
}
}
} );
}
public Boolean contains( String key )
{
return Invoker.invokeSync( CACHE_SERVER_ALIAS, "containsKey", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, getChainedResponder() );
}
public void contains( String key, AsyncCallback<Boolean> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "containsKey", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, callback );
}
public void expireIn( String key, int seconds )
{
Invoker.invokeSync( CACHE_SERVER_ALIAS, "expireIn", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, seconds }, getChainedResponder() );
}
public void expireIn( String key, int seconds, AsyncCallback<Object> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "expireIn", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, seconds }, callback );
}
public void expireAt( String key, Date date )
{
expireAt( key, date.getTime() );
}
public void expireAt( String key, long timestamp )
{
timestamp /= 1000;
Invoker.invokeSync( CACHE_SERVER_ALIAS, "expireAt", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, timestamp }, getChainedResponder() );
}
public void expireAt( String key, Date date, AsyncCallback<Object> callback )
{
expireAt( key, date.getTime(), callback );
}
public void expireAt( String key, long timestamp, AsyncCallback<Object> callback )
{
timestamp /= 1000;
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "expireAt", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key, timestamp }, callback );
}
public void delete( String key )
{
Invoker.invokeSync( CACHE_SERVER_ALIAS, "delete", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, getChainedResponder() );
}
public void delete( final String key, final AsyncCallback<Object> callback )
{
Invoker.invokeAsync( CACHE_SERVER_ALIAS, "delete", new Object[] { Backendless.getApplicationId(), Backendless.getVersion(), key }, callback );
}
private static <T> IChainedResponder getChainedResponder()
{
return new AdaptingResponder<T>();
}
private static Object deserialize( byte[] bytes, Type type )
{
Object object = null;
try
{
object = weborb.util.io.Serializer.fromBytes( bytes, ISerializer.AMF3, false );
if( object instanceof IAdaptingType )
return type == null ? ((IAdaptingType) object).defaultAdapt() : ((IAdaptingType) object).adapt( type );
}
catch( Exception e )
{
throw new BackendlessException( e );
}
return object;
}
private static byte[] serialize( Object object )
{
byte[] bytes;
try
{
bytes = weborb.util.io.Serializer.toBytes( object, ISerializer.AMF3 );
}
catch( Exception e )
{
throw new BackendlessException( e );
}
return bytes;
}
}
|
changed timestamp calculation - removed division by 1000
|
src/com/backendless/Cache.java
|
changed timestamp calculation - removed division by 1000
|
|
Java
|
lgpl-2.1
|
355ef975e0c76f5dfcf7749e753569c1e7fea696
| 0
|
sbliven/biojava,sbliven/biojava,sbliven/biojava
|
/*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. If you do not have a copy,
* see:
*
* http://www.gnu.org/copyleft/lesser.html
*
* Copyright for this code is held jointly by the individual
* authors. These should be listed in @author doc comments.
*
* For more information on the BioJava project and its aims,
* or to join the biojava-l mailing list, visit the home page
* at:
*
* http://www.biojava.org/
*
* Created on 26.04.2004
* @author Andreas Prlic
*
*/
package org.biojava.bio.structure.io;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.biojava.bio.structure.Atom;
import org.biojava.bio.structure.Chain;
import org.biojava.bio.structure.DBRef;
import org.biojava.bio.structure.Element;
import org.biojava.bio.structure.Group;
import org.biojava.bio.structure.PDBHeader;
import org.biojava.bio.structure.SSBond;
import org.biojava.bio.structure.Site;
import org.biojava.bio.structure.Structure;
import org.biojava.bio.structure.StructureException;
import org.biojava3.core.util.XMLWriter;
/** Methods to convert a structure object into different file formats.
* @author Andreas Prlic
* @since 1.4
*/
public class FileConvert {
Structure structure ;
boolean printConnections;
// Locale should be english, e.g. in DE separator is "," -> PDB files have "." !
static DecimalFormat d3 = (DecimalFormat)NumberFormat.getInstance(java.util.Locale.UK);
static {
d3.setMaximumIntegerDigits(3);
d3.setMinimumFractionDigits(3);
d3.setMaximumFractionDigits(3);
}
static DecimalFormat d2 = (DecimalFormat)NumberFormat.getInstance(java.util.Locale.UK);
static {
d2.setMaximumIntegerDigits(2);
d2.setMinimumFractionDigits(2);
d2.setMaximumFractionDigits(2);
}
private static final String newline = System.getProperty("line.separator");
/**
* Constructs a FileConvert object.
*
* @param struc a Structure object
*/
public FileConvert(Structure struc) {
structure = struc ;
printConnections = true;
}
/** align a string to the right
* length is the total length the new string should take, inlcuding spaces on the left
* incredible that this tool is missing in java !!!
*/
private static String alignRight(String input, int length){
int n = input.length();
if ( n >= length)
return input;
String spaces = " " ;
int diff = length - n ;
StringBuffer s = new StringBuffer();
s.append(spaces.substring(0,diff));
s.append(input);
return s.toString();
}
private static String alignLeft(String input, int length){
if (input.length() >= length) {
return input;
}
String spaces = " " ;
input += spaces.substring(0, length - input.length() );
return input;
}
/** returns if the Connections should be added
* default is true;
* @return if the printConnections flag is set
*/
public boolean doPrintConnections() {
return printConnections;
}
/** enable/disable printing of connections
* connections are sometimes buggy in PDB files
* so there are some cases where one might turn this off.
* @param printConnections
*/
public void setPrintConnections(boolean printConnections) {
this.printConnections = printConnections;
}
/** prints the connections in PDB style
*
* Thanks to Tamas Horvath for this one
*/
private String printPDBConnections(){
StringBuffer str = new StringBuffer();
List<Map<String, Integer>> cons = structure.getConnections();
for (int cnr = 0; cnr<cons.size();cnr++){
Map<String,Integer> con = cons.get(cnr);
Integer as = (Integer) con.get("atomserial");
String atomserial = "";
String bond1 = "";
String bond2 = "";
String bond3 = "";
String bond4 = "";
String hyd1 = "";
String hyd2 = "";
String salt1 = "";
String hyd3 = "";
String hyd4 = "";
String salt2 = "";
if (con.containsKey("bond1")) bond1 = con.get("bond1").toString();
if (con.containsKey("bond2")) bond2 = con.get("bond2").toString();
if (con.containsKey("bond3")) bond3 = con.get("bond3").toString();
if (con.containsKey("bond4")) bond4 = con.get("bond4").toString();
if (con.containsKey("hyd1")) hyd1 = con.get("hyd1").toString();
if (con.containsKey("hyd2")) hyd2 = con.get("hyd2").toString();
if (con.containsKey("salt1")) salt1 = con.get("salt1").toString();
if (con.containsKey("hyd3")) hyd3 = con.get("hyd3").toString();
if (con.containsKey("hyd4")) hyd4 = con.get("hyd4").toString();
if (con.containsKey("salt2")) salt2 = con.get("salt2").toString();
atomserial = alignRight(""+as,5) ;
bond1 = alignRight(bond1,5) ;
bond2 = alignRight(bond2,5) ;
bond3 = alignRight(bond3,5) ;
bond4 = alignRight(bond4,5) ;
hyd1 = alignRight(hyd1,5) ;
hyd2 = alignRight(hyd2,5) ;
salt1 = alignRight(salt1,5) ;
hyd3 = alignRight(hyd3,5) ;
hyd4 = alignRight(hyd4,5) ;
salt2 = alignRight(salt2,5) ;
String connectLine = "CONECT" + atomserial + bond1 + bond2 + bond3 +
bond4 + hyd1 + hyd2 + salt1 + hyd3 + hyd4 + salt2;
str.append(connectLine).append(newline);
}
return str.toString();
}
/** Convert a structure into a PDB file.
* @return a String representing a PDB file.
*/
public String toPDB() {
StringBuffer str = new StringBuffer();
//int i = 0 ;
// TODO: print all the PDB header information in PDB style
// some objects (PDBHeader, Compound) are still missing
//
PDBHeader header = structure.getPDBHeader();
header.toPDB(str);
//REMARK 800
if (!structure.getSites().isEmpty()) {
str.append("REMARK 800 ").append(newline);
str.append("REMARK 800 SITE ").append(newline);
for (Site site : structure.getSites()) {
site.remark800toPDB(str);
}
}
//DBREF
for (DBRef dbref : structure.getDBRefs()){
dbref.toPDB(str);
str.append(newline);
}
//SSBOND
for (SSBond ssbond : structure.getSSBonds()){
ssbond.toPDB(str);
str.append(newline);
}
//SITE
for (Site site : structure.getSites()) {
try {
site.toPDB(str);
} catch (Exception e){
e.printStackTrace();
}
}
//
// print the atom records
//
// do for all models
int nrModels = structure.nrModels() ;
if ( structure.isNmr()) {
str.append("EXPDTA NMR, "+ nrModels+" STRUCTURES"+newline) ;
}
for (int m = 0 ; m < nrModels ; m++) {
List<Chain> model = structure.getModel(m);
// todo support NMR structures ...
if ( structure.isNmr()) {
str.append("MODEL " + (m+1)+ newline);
}
// do for all chains
int nrChains = model.size();
for ( int c =0; c<nrChains;c++) {
Chain chain = model.get(c);
//String chainID = chain.getChainID();
//if ( chainID.equals(DEFAULTCHAIN) ) chainID = " ";
// do for all groups
int nrGroups = chain.getAtomLength();
for ( int h=0; h<nrGroups;h++){
Group g= chain.getAtomGroup(h);
toPDB(g,str);
}
}
if ( structure.isNmr()) {
str.append("ENDMDL").append(newline);
}
}
if ( doPrintConnections() )
str.append(printPDBConnections());
return str.toString() ;
}
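	/*
	 * Illustrative usage sketch (not part of the original source):
	 *
	 *   FileConvert fc = new FileConvert(structure);
	 *   String pdb = fc.toPDB();   // full PDB text: header, REMARK 800/SITE, DBREF, SSBOND, atoms and CONECT records
	 *
	 * where structure is any populated Structure instance.
	 */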
private void toPDB(Group g, StringBuffer str) {
// iterate over all atoms ...
// format output ...
int groupsize = g.size();
for ( int atompos = 0 ; atompos < groupsize; atompos++) {
Atom a = null ;
try {
a = g.getAtom(atompos);
} catch ( StructureException e) {
System.err.println(e);
continue ;
}
toPDB(a, str);
//line = record + serial + " " + fullname +altLoc
//+ leftResName + " " + chainID + resseq
//+ " " + x+y+z
//+ occupancy + tempfactor;
//str.append(line + newline);
//System.out.println(line);
}
if ( g.hasAltLoc()){
for (Group alt : g.getAltLocs() ) {
toPDB(alt,str);
}
}
}
/** Prints the content of an Atom object as a PDB formatted line.
*
* @param a
* @return
*/
public static String toPDB(Atom a){
StringBuffer w = new StringBuffer();
toPDB(a,w);
return w.toString();
}
/**
* print ATOM record in the following syntax
<pre>
ATOM 1 N ASP A 15 110.964 24.941 59.191 1.00 83.44 N
*
COLUMNS DATA TYPE FIELD DEFINITION
---------------------------------------------------------------------------------
1 - 6 Record name "ATOM "
7 - 11 Integer serial Atom serial number.
13 - 16 Atom name Atom name.
17 Character altLoc Alternate location indicator.
18 - 20 Residue name resName Residue name.
22 Character chainID Chain identifier.
23 - 26 Integer resSeq Residue sequence number.
27 AChar iCode Code for insertion of residues.
31 - 38 Real(8.3) x Orthogonal coordinates for X in
Angstroms.
39 - 46 Real(8.3) y Orthogonal coordinates for Y in
Angstroms.
47 - 54 Real(8.3) z Orthogonal coordinates for Z in
Angstroms.
55 - 60 Real(6.2) occupancy Occupancy.
61 - 66 Real(6.2) tempFactor Temperature factor.
73 - 76 LString(4) segID Segment identifier, left-justified.
77 - 78 LString(2) element Element symbol, right-justified.
79 - 80 LString(2) charge Charge on the atom.
</pre>
*/
public static void toPDB(Atom a, StringBuffer str) {
Group g = a.getGroup();
Chain c = g.getChain();
String chainID = c.getChainID();
String type = g.getType() ;
String record = "" ;
if ( type.equals("hetatm") ) {
record = "HETATM";
} else {
record = "ATOM ";
}
// format output ...
//int groupsize = g.size();
String resName = g.getPDBName();
String pdbcode = g.getResidueNumber().toString();
//String line = "" ;
int seri = a.getPDBserial() ;
String serial = alignRight(""+seri,5) ;
String fullname = a.getFullName() ;
// System.out.println(" fullname: " + fullname + " : " + a.getAltLoc() + " : " + pdbcode);
Character altLoc = a.getAltLoc() ;
String resseq = "" ;
if ( hasInsertionCode(pdbcode) )
resseq = alignRight(""+pdbcode,5);
else
resseq = alignRight(""+pdbcode,4)+" ";
String x = alignRight(""+d3.format(a.getX()),8);
String y = alignRight(""+d3.format(a.getY()),8);
String z = alignRight(""+d3.format(a.getZ()),8);
String occupancy = alignRight(""+d2.format(a.getOccupancy()),6) ;
String tempfactor = alignRight(""+d2.format(a.getTempFactor()),6);
//System.out.println("fullname,zise:" + fullname + " " + fullname.length());
String leftResName = alignLeft(resName,3);
StringBuffer s = new StringBuffer();
s.append(record);
s.append(serial);
s.append(" ");
s.append(fullname);
s.append(altLoc);
s.append(leftResName);
s.append(" ");
s.append(chainID);
s.append(resseq);
s.append(" ");
s.append(x);
s.append(y);
s.append(z);
s.append(occupancy);
s.append(tempfactor);
Element e = a.getElement();
String eString = e.toString().toUpperCase();
if ( e.equals(Element.R)) {
eString = "X";
}
str.append(String.format("%-76s%2s", s.toString(),eString));
str.append(newline);
}
/** test if pdbserial has an insertion code */
private static boolean hasInsertionCode(String pdbserial) {
try {
Integer.parseInt(pdbserial) ;
} catch (NumberFormatException e) {
return true ;
}
return false ;
}
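	/* Worked example (illustrative): hasInsertionCode("42") returns false, while
	   hasInsertionCode("42A") returns true, because "42A" cannot be parsed as an integer. */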
/** convert a protein Structure to a DAS Structure XML response .
* @param xw a XMLWriter object
* @throws IOException ...
*
*/
@SuppressWarnings("deprecation")
public void toDASStructure(XMLWriter xw)
throws IOException
{
/*xmlns="http://www.sanger.ac.uk/xml/das/2004/06/17/dasalignment.xsd" xmlns:align="http://www.sanger.ac.uk/xml/das/2004/06/17/alignment.xsd" xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance" xsd:schemaLocation="http://www.sanger.ac.uk/xml/das/2004/06/17/dasalignment.xsd http://www.sanger.ac.uk/xml/das//2004/06/17/dasalignment.xsd"*/
if ( structure == null){
System.err.println("can not convert structure null");
return;
}
Map<String,Object> header = structure.getHeader();
xw.openTag("object");
xw.attribute("dbAccessionId",structure.getPDBCode());
xw.attribute("intObjectId" ,structure.getPDBCode());
// missing modification date
String modificationDate = (String)header.get("modDate") ;
xw.attribute("objectVersion",modificationDate);
xw.attribute("type","protein structure");
xw.attribute("dbSource","PDB");
xw.attribute("dbVersion","20070116");
xw.attribute("dbCoordSys","PDBresnum,Protein Structure");
// do we need object details ???
xw.closeTag("object");
// do for all models
for (int modelnr = 0;modelnr<structure.nrModels();modelnr++){
// do for all chains:
for (int chainnr = 0;chainnr<structure.size(modelnr);chainnr++){
Chain chain = (Chain)structure.getChain(modelnr,chainnr);
xw.openTag("chain");
xw.attribute("id",chain.getChainID());
xw.attribute("SwissprotId",chain.getSwissprotId() );
if (structure.isNmr()){
xw.attribute("model",Integer.toString(modelnr+1));
}
//do for all groups:
for (int groupnr =0;
groupnr<chain.getAtomLength()
;groupnr++){
Group gr = chain.getAtomGroup(groupnr);
xw.openTag("group");
xw.attribute("name",gr.getPDBName());
xw.attribute("type",gr.getType());
xw.attribute("groupID",gr.getResidueNumber().toString());
// do for all atoms:
//Atom[] atoms = gr.getAtoms();
List<Atom> atoms = gr.getAtoms();
for (int atomnr=0;atomnr<atoms.size();atomnr++){
Atom atom = (Atom)atoms.get(atomnr);
xw.openTag("atom");
xw.attribute("atomID",Integer.toString(atom.getPDBserial()));
xw.attribute("atomName",atom.getFullName());
xw.attribute("x",Double.toString(atom.getX()));
xw.attribute("y",Double.toString(atom.getY()));
xw.attribute("z",Double.toString(atom.getZ()));
xw.closeTag("atom");
}
xw.closeTag("group") ;
}
xw.closeTag("chain");
}
}
if ( doPrintConnections() ) {
// do connectivity for all chains:
List<Map<String,Integer>> cons = structure.getConnections();
for (int cnr = 0; cnr<cons.size();cnr++){
/*
the HashMap for a single CONECT line contains the following fields:
<ul>
<li>atomserial (mandatory) : Atom serial number
<li>bond1 .. bond4 (optional): Serial number of bonded atom
<li>hydrogen1 .. hydrogen4 (optional):Serial number of hydrogen bonded atom
<li>salt1 .. salt2 (optional): Serial number of salt bridged atom
</ul>
*/
Map<String, Integer> con = (Map<String, Integer>)cons.get(cnr);
Integer as = (Integer)con.get("atomserial");
int atomserial = as.intValue();
List<Integer> atomids = new ArrayList<Integer>() ;
// test salt and hydrogen first //
if (con.containsKey("salt1")) atomids.add(con.get("salt1"));
if (con.containsKey("salt2")) atomids.add(con.get("salt2"));
if (atomids.size()!=0){
addConnection(xw,"salt",atomserial,atomids);
atomids = new ArrayList<Integer>() ;
}
if (con.containsKey("hydrogen1")) atomids.add(con.get("hydrogen1"));
if (con.containsKey("hydrogen2")) atomids.add(con.get("hydrogen2"));
if (con.containsKey("hydrogen3")) atomids.add(con.get("hydrogen3"));
if (con.containsKey("hydrogen4")) atomids.add(con.get("hydrogen4"));
if (atomids.size()!=0){
addConnection(xw,"hydrogen",atomserial,atomids);
atomids = new ArrayList<Integer>() ;
}
if (con.containsKey("bond1")) atomids.add(con.get("bond1"));
if (con.containsKey("bond2")) atomids.add(con.get("bond2"));
if (con.containsKey("bond3")) atomids.add(con.get("bond3"));
if (con.containsKey("bond4")) atomids.add(con.get("bond4"));
if (atomids.size()!=0){
addConnection(xw,"bond",atomserial,atomids);
}
}
}
}
private void addConnection(XMLWriter xw,String connType, int atomserial, List<Integer> atomids){
try{
xw.openTag("connect");
xw.attribute("atomSerial",Integer.toString(atomserial));
xw.attribute("type",connType);
for (int i=0;i<atomids.size();i++){
Integer atomid = atomids.get(i);
if ( atomid == null)
continue;
int aid = atomid.intValue();
xw.openTag("atomID");
xw.attribute("atomID",Integer.toString(aid));
xw.closeTag("atomID");
}
xw.closeTag("connect");
} catch( Exception e) {
e.printStackTrace();
}
}
}
|
biojava3-structure/src/main/java/org/biojava/bio/structure/io/FileConvert.java
|
/*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. If you do not have a copy,
* see:
*
* http://www.gnu.org/copyleft/lesser.html
*
* Copyright for this code is held jointly by the individual
* authors. These should be listed in @author doc comments.
*
* For more information on the BioJava project and its aims,
* or to join the biojava-l mailing list, visit the home page
* at:
*
* http://www.biojava.org/
*
* Created on 26.04.2004
* @author Andreas Prlic
*
*/
package org.biojava.bio.structure.io;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.biojava.bio.structure.Atom;
import org.biojava.bio.structure.Chain;
import org.biojava.bio.structure.DBRef;
import org.biojava.bio.structure.Group;
import org.biojava.bio.structure.PDBHeader;
import org.biojava.bio.structure.SSBond;
import org.biojava.bio.structure.Site;
import org.biojava.bio.structure.Structure;
import org.biojava.bio.structure.StructureException;
import org.biojava3.core.util.XMLWriter;
/** Methods to convert a structure object into different file formats.
* @author Andreas Prlic
* @since 1.4
*/
public class FileConvert {
Structure structure ;
boolean printConnections;
// The locale must be English: in the DE locale the decimal separator is ",", but PDB files require ".".
static DecimalFormat d3 = (DecimalFormat)NumberFormat.getInstance(java.util.Locale.UK);
static {
d3.setMaximumIntegerDigits(3);
d3.setMinimumFractionDigits(3);
d3.setMaximumFractionDigits(3);
}
static DecimalFormat d2 = (DecimalFormat)NumberFormat.getInstance(java.util.Locale.UK);
static {
d2.setMaximumIntegerDigits(2);
d2.setMinimumFractionDigits(2);
d2.setMaximumFractionDigits(2);
}
private static final String newline = System.getProperty("line.separator");
/**
* Constructs a FileConvert object.
*
* @param struc a Structure object
*/
public FileConvert(Structure struc) {
structure = struc ;
printConnections = true;
}
/** Right-aligns a string.
* length is the total length the new string should take, including spaces on the left.
* Incredible that this tool is missing in Java!
*/
private static String alignRight(String input, int length){
int n = input.length();
if ( n >= length)
return input;
String spaces = " " ;
int diff = length - n ;
StringBuffer s = new StringBuffer();
s.append(spaces.substring(0,diff));
s.append(input);
return s.toString();
}
private static String alignLeft(String input, int length){
if (input.length() >= length) {
return input;
}
String spaces = " " ;
input += spaces.substring(0, length - input.length() );
return input;
}
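// Worked examples for the two padding helpers above: alignRight("7", 5) returns "    7"
// (four leading spaces) and alignLeft("CA", 4) returns "CA  " (two trailing spaces);
// both are used below to fill the fixed-width columns of PDB records.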
/** Returns whether the connections should be added.
* The default is true.
* @return whether the printConnections flag is set
*/
public boolean doPrintConnections() {
return printConnections;
}
/** Enables or disables printing of connections.
* Connections are sometimes buggy in PDB files, so in some cases one might want to turn this off.
* @param printConnections whether CONECT records should be written
*/
public void setPrintConnections(boolean printConnections) {
this.printConnections = printConnections;
}
/** prints the connections in PDB style
*
* Thanks to Tamas Horvath for this one
*/
private String printPDBConnections(){
StringBuffer str = new StringBuffer();
List<Map<String, Integer>> cons = structure.getConnections();
for (int cnr = 0; cnr<cons.size();cnr++){
Map<String,Integer> con = cons.get(cnr);
Integer as = (Integer) con.get("atomserial");
String atomserial = "";
String bond1 = "";
String bond2 = "";
String bond3 = "";
String bond4 = "";
String hyd1 = "";
String hyd2 = "";
String salt1 = "";
String hyd3 = "";
String hyd4 = "";
String salt2 = "";
if (con.containsKey("bond1")) bond1 = con.get("bond1").toString();
if (con.containsKey("bond2")) bond2 = con.get("bond2").toString();
if (con.containsKey("bond3")) bond3 = con.get("bond3").toString();
if (con.containsKey("bond4")) bond4 = con.get("bond4").toString();
if (con.containsKey("hyd1")) hyd1 = con.get("hyd1").toString();
if (con.containsKey("hyd2")) hyd2 = con.get("hyd2").toString();
if (con.containsKey("salt1")) salt1 = con.get("salt1").toString();
if (con.containsKey("hyd3")) hyd3 = con.get("hyd3").toString();
if (con.containsKey("hyd4")) hyd4 = con.get("hyd4").toString();
if (con.containsKey("salt2")) salt2 = con.get("salt2").toString();
atomserial = alignRight(""+as,5) ;
bond1 = alignRight(bond1,5) ;
bond2 = alignRight(bond2,5) ;
bond3 = alignRight(bond3,5) ;
bond4 = alignRight(bond4,5) ;
hyd1 = alignRight(hyd1,5) ;
hyd2 = alignRight(hyd2,5) ;
salt1 = alignRight(salt1,5) ;
hyd3 = alignRight(hyd3,5) ;
hyd4 = alignRight(hyd4,5) ;
salt2 = alignRight(salt2,5) ;
String connectLine = "CONECT" + atomserial + bond1 + bond2 + bond3 +
bond4 + hyd1 + hyd2 + salt1 + hyd3 + hyd4 + salt2;
str.append(connectLine).append(newline);
}
return str.toString();
}
/** Convert a structure into a PDB file.
* @return a String representing a PDB file.
*/
public String toPDB() {
StringBuffer str = new StringBuffer();
//int i = 0 ;
// TODO: print all the PDB header information in PDB style
// some objects (PDBHeader, Compound) are still missing
//
PDBHeader header = structure.getPDBHeader();
header.toPDB(str);
//REMARK 800
if (!structure.getSites().isEmpty()) {
str.append("REMARK 800 ").append(newline);
str.append("REMARK 800 SITE ").append(newline);
for (Site site : structure.getSites()) {
site.remark800toPDB(str);
}
}
//DBREF
for (DBRef dbref : structure.getDBRefs()){
dbref.toPDB(str);
str.append(newline);
}
//SSBOND
for (SSBond ssbond : structure.getSSBonds()){
ssbond.toPDB(str);
str.append(newline);
}
//SITE
for (Site site : structure.getSites()) {
try {
site.toPDB(str);
} catch (Exception e){
e.printStackTrace();
}
}
//
// print the atom records
//
// do for all models
int nrModels = structure.nrModels() ;
if ( structure.isNmr()) {
str.append("EXPDTA NMR, "+ nrModels+" STRUCTURES"+newline) ;
}
for (int m = 0 ; m < nrModels ; m++) {
List<Chain> model = structure.getModel(m);
// todo support NMR structures ...
if ( structure.isNmr()) {
str.append("MODEL " + (m+1)+ newline);
}
// do for all chains
int nrChains = model.size();
for ( int c =0; c<nrChains;c++) {
Chain chain = model.get(c);
//String chainID = chain.getChainID();
//if ( chainID.equals(DEFAULTCHAIN) ) chainID = " ";
// do for all groups
int nrGroups = chain.getAtomLength();
for ( int h=0; h<nrGroups;h++){
Group g= chain.getAtomGroup(h);
toPDB(g,str);
}
}
if ( structure.isNmr()) {
str.append("ENDMDL").append(newline);
}
}
if ( doPrintConnections() )
str.append(printPDBConnections());
return str.toString() ;
}
private void toPDB(Group g, StringBuffer str) {
// iterate over all atoms ...
// format output ...
int groupsize = g.size();
for ( int atompos = 0 ; atompos < groupsize; atompos++) {
Atom a = null ;
try {
a = g.getAtom(atompos);
} catch ( StructureException e) {
System.err.println(e);
continue ;
}
toPDB(a, str);
//line = record + serial + " " + fullname +altLoc
//+ leftResName + " " + chainID + resseq
//+ " " + x+y+z
//+ occupancy + tempfactor;
//str.append(line + newline);
//System.out.println(line);
}
if ( g.hasAltLoc()){
for (Group alt : g.getAltLocs() ) {
toPDB(alt,str);
}
}
}
/** Prints the content of an Atom object as a PDB-formatted line.
*
* @param a the Atom to convert
* @return the PDB-formatted ATOM/HETATM line
*/
public static String toPDB(Atom a){
StringBuffer w = new StringBuffer();
toPDB(a,w);
return w.toString();
}
/**
* print ATOM record in the following syntax
<pre>
ATOM 1 N ASP A 15 110.964 24.941 59.191 1.00 83.44 N
*
COLUMNS DATA TYPE FIELD DEFINITION
---------------------------------------------------------------------------------
1 - 6 Record name "ATOM "
7 - 11 Integer serial Atom serial number.
13 - 16 Atom name Atom name.
17 Character altLoc Alternate location indicator.
18 - 20 Residue name resName Residue name.
22 Character chainID Chain identifier.
23 - 26 Integer resSeq Residue sequence number.
27 AChar iCode Code for insertion of residues.
31 - 38 Real(8.3) x Orthogonal coordinates for X in
Angstroms.
39 - 46 Real(8.3) y Orthogonal coordinates for Y in
Angstroms.
47 - 54 Real(8.3) z Orthogonal coordinates for Z in
Angstroms.
55 - 60 Real(6.2) occupancy Occupancy.
61 - 66 Real(6.2) tempFactor Temperature factor.
73 - 76 LString(4) segID Segment identifier, left-justified.
77 - 78 LString(2) element Element symbol, right-justified.
79 - 80 LString(2) charge Charge on the atom.
</pre>
*/
public static void toPDB(Atom a, StringBuffer str) {
Group g = a.getGroup();
Chain c = g.getChain();
String chainID = c.getChainID();
String type = g.getType() ;
String record = "" ;
if ( type.equals("hetatm") ) {
record = "HETATM";
} else {
record = "ATOM ";
}
// format output ...
//int groupsize = g.size();
String resName = g.getPDBName();
String pdbcode = g.getResidueNumber().toString();
//String line = "" ;
int seri = a.getPDBserial() ;
String serial = alignRight(""+seri,5) ;
String fullname = a.getFullName() ;
// System.out.println(" fullname: " + fullname + " : " + a.getAltLoc() + " : " + pdbcode);
Character altLoc = a.getAltLoc() ;
String resseq = "" ;
if ( hasInsertionCode(pdbcode) )
resseq = alignRight(""+pdbcode,5);
else
resseq = alignRight(""+pdbcode,4)+" ";
String x = alignRight(""+d3.format(a.getX()),8);
String y = alignRight(""+d3.format(a.getY()),8);
String z = alignRight(""+d3.format(a.getZ()),8);
String occupancy = alignRight(""+d2.format(a.getOccupancy()),6) ;
String tempfactor = alignRight(""+d2.format(a.getTempFactor()),6);
//System.out.println("fullname,zise:" + fullname + " " + fullname.length());
String leftResName = alignLeft(resName,3);
StringBuffer s = new StringBuffer();
s.append(record);
s.append(serial);
s.append(" ");
s.append(fullname);
s.append(altLoc);
s.append(leftResName);
s.append(" ");
s.append(chainID);
s.append(resseq);
s.append(" ");
s.append(x);
s.append(y);
s.append(z);
s.append(occupancy);
s.append(tempfactor);
str.append(String.format("%-76s%2s", s.toString(),a.getElement().toString() ));
str.append(newline);
}
/** test if pdbserial has an insertion code */
private static boolean hasInsertionCode(String pdbserial) {
try {
Integer.parseInt(pdbserial) ;
} catch (NumberFormatException e) {
return true ;
}
return false ;
}
/** Converts a protein Structure to a DAS Structure XML response.
* @param xw an XMLWriter object
* @throws IOException if writing to the XMLWriter fails
*
*/
@SuppressWarnings("deprecation")
public void toDASStructure(XMLWriter xw)
throws IOException
{
/*xmlns="http://www.sanger.ac.uk/xml/das/2004/06/17/dasalignment.xsd" xmlns:align="http://www.sanger.ac.uk/xml/das/2004/06/17/alignment.xsd" xmlns:xsd="http://www.w3.org/2001/XMLSchema-instance" xsd:schemaLocation="http://www.sanger.ac.uk/xml/das/2004/06/17/dasalignment.xsd http://www.sanger.ac.uk/xml/das//2004/06/17/dasalignment.xsd"*/
if ( structure == null){
System.err.println("can not convert structure null");
return;
}
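// Rough shape of the XML emitted below (illustrative only; element content abbreviated):
//   <object dbAccessionId="..." dbSource="PDB" .../>
//   <chain id="A">
//     <group name="ALA" type="amino" groupID="1">
//       <atom atomID="1" atomName=" CA " x="..." y="..." z="..."/>
//     </group>
//   </chain>
//   <connect atomSerial="..." type="bond"><atomID atomID="..."/></connect>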
Map<String,Object> header = structure.getHeader();
xw.openTag("object");
xw.attribute("dbAccessionId",structure.getPDBCode());
xw.attribute("intObjectId" ,structure.getPDBCode());
// missing modification date
String modificationDate = (String)header.get("modDate") ;
xw.attribute("objectVersion",modificationDate);
xw.attribute("type","protein structure");
xw.attribute("dbSource","PDB");
xw.attribute("dbVersion","20070116");
xw.attribute("dbCoordSys","PDBresnum,Protein Structure");
// do we need object details ???
xw.closeTag("object");
// do for all models
for (int modelnr = 0;modelnr<structure.nrModels();modelnr++){
// do for all chains:
for (int chainnr = 0;chainnr<structure.size(modelnr);chainnr++){
Chain chain = (Chain)structure.getChain(modelnr,chainnr);
xw.openTag("chain");
xw.attribute("id",chain.getChainID());
xw.attribute("SwissprotId",chain.getSwissprotId() );
if (structure.isNmr()){
xw.attribute("model",Integer.toString(modelnr+1));
}
//do for all groups:
for (int groupnr = 0; groupnr < chain.getAtomLength(); groupnr++){
Group gr = chain.getAtomGroup(groupnr);
xw.openTag("group");
xw.attribute("name",gr.getPDBName());
xw.attribute("type",gr.getType());
xw.attribute("groupID",gr.getResidueNumber().toString());
// do for all atoms:
//Atom[] atoms = gr.getAtoms();
List<Atom> atoms = gr.getAtoms();
for (int atomnr=0;atomnr<atoms.size();atomnr++){
Atom atom = (Atom)atoms.get(atomnr);
xw.openTag("atom");
xw.attribute("atomID",Integer.toString(atom.getPDBserial()));
xw.attribute("atomName",atom.getFullName());
xw.attribute("x",Double.toString(atom.getX()));
xw.attribute("y",Double.toString(atom.getY()));
xw.attribute("z",Double.toString(atom.getZ()));
xw.closeTag("atom");
}
xw.closeTag("group") ;
}
xw.closeTag("chain");
}
}
if ( doPrintConnections() ) {
// do connectivity for all chains:
List<Map<String,Integer>> cons = structure.getConnections();
for (int cnr = 0; cnr<cons.size();cnr++){
/*
the HashMap for a single CONECT line contains the following fields:
<ul>
<li>atomserial (mandatory) : Atom serial number
<li>bond1 .. bond4 (optional): Serial number of bonded atom
<li>hydrogen1 .. hydrogen4 (optional):Serial number of hydrogen bonded atom
<li>salt1 .. salt2 (optional): Serial number of salt bridged atom
</ul>
*/
Map<String, Integer> con = (Map<String, Integer>)cons.get(cnr);
Integer as = (Integer)con.get("atomserial");
int atomserial = as.intValue();
List<Integer> atomids = new ArrayList<Integer>() ;
// test salt and hydrogen first //
if (con.containsKey("salt1")) atomids.add(con.get("salt1"));
if (con.containsKey("salt2")) atomids.add(con.get("salt2"));
if (atomids.size()!=0){
addConnection(xw,"salt",atomserial,atomids);
atomids = new ArrayList<Integer>() ;
}
if (con.containsKey("hydrogen1")) atomids.add(con.get("hydrogen1"));
if (con.containsKey("hydrogen2")) atomids.add(con.get("hydrogen2"));
if (con.containsKey("hydrogen3")) atomids.add(con.get("hydrogen3"));
if (con.containsKey("hydrogen4")) atomids.add(con.get("hydrogen4"));
if (atomids.size()!=0){
addConnection(xw,"hydrogen",atomserial,atomids);
atomids = new ArrayList<Integer>() ;
}
if (con.containsKey("bond1")) atomids.add(con.get("bond1"));
if (con.containsKey("bond2")) atomids.add(con.get("bond2"));
if (con.containsKey("bond3")) atomids.add(con.get("bond3"));
if (con.containsKey("bond4")) atomids.add(con.get("bond4"));
if (atomids.size()!=0){
addConnection(xw,"bond",atomserial,atomids);
}
}
}
}
private void addConnection(XMLWriter xw,String connType, int atomserial, List<Integer> atomids){
try{
xw.openTag("connect");
xw.attribute("atomSerial",Integer.toString(atomserial));
xw.attribute("type",connType);
for (int i=0;i<atomids.size();i++){
Integer atomid = atomids.get(i);
if ( atomid == null)
continue;
int aid = atomid.intValue();
xw.openTag("atomID");
xw.attribute("atomID",Integer.toString(aid));
xw.closeTag("atomID");
}
xw.closeTag("connect");
} catch( Exception e) {
e.printStackTrace();
}
}
}
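// Minimal usage sketch, assuming a populated Structure instance named "structure"
// (the variable name is a placeholder):
//
//   FileConvert convert = new FileConvert(structure);
//   convert.setPrintConnections(false); // CONECT records are sometimes buggy in PDB files
//   String pdbText = convert.toPDB();   // fixed-width PDB text, one ATOM/HETATM line per atom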
|
converting ELement R to X
git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@9553 7c6358e6-4a41-0410-a743-a5b2a554c398
|
biojava3-structure/src/main/java/org/biojava/bio/structure/io/FileConvert.java
|
converting ELement R to X
|
|
Java
|
lgpl-2.1
|
2e3910f204fe79724beb41993b9c585ae8672916
| 0
|
spotbugs/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs,spotbugs/spotbugs
|
/*
* FindBugs - Find Bugs in Java programs
* Copyright (C) 2003-2007 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.classfile.engine;
import java.util.TreeSet;
import javax.annotation.CheckForNull;
import org.apache.bcel.classfile.ConstantClass;
import org.apache.bcel.classfile.ConstantNameAndType;
import org.apache.bcel.classfile.Field;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import edu.umd.cs.findbugs.classfile.ClassDescriptor;
import edu.umd.cs.findbugs.classfile.DescriptorFactory;
import edu.umd.cs.findbugs.classfile.FieldDescriptor;
import edu.umd.cs.findbugs.classfile.ICodeBaseEntry;
import edu.umd.cs.findbugs.classfile.InvalidClassFileFormatException;
import edu.umd.cs.findbugs.classfile.MethodDescriptor;
import edu.umd.cs.findbugs.classfile.analysis.ClassInfo;
import edu.umd.cs.findbugs.classfile.analysis.ClassNameAndSuperclassInfo;
import edu.umd.cs.findbugs.internalAnnotations.SlashedClassName;
import edu.umd.cs.findbugs.util.ClassName;
import edu.umd.cs.findbugs.visitclass.AnnotationVisitor;
/**
* @author William Pugh
*/
@Deprecated
public class ClassParserUsingBCEL implements ClassParserInterface {
private final JavaClass javaClass;
private final String slashedClassName;
private final ClassDescriptor expectedClassDescriptor;
private final ICodeBaseEntry codeBaseEntry;
public ClassParserUsingBCEL(JavaClass javaClass, @CheckForNull ClassDescriptor expectedClassDescriptor,
ICodeBaseEntry codeBaseEntry) {
this.javaClass = javaClass;
this.slashedClassName = javaClass.getClassName().replace('.', '/');
this.expectedClassDescriptor = expectedClassDescriptor;
this.codeBaseEntry = codeBaseEntry;
}
/*
* (non-Javadoc)
*
* @see
* edu.umd.cs.findbugs.classfile.engine.ClassParserInterface#parse(edu.umd
* .cs.findbugs.classfile.analysis.ClassNameAndSuperclassInfo.Builder)
*/
@Override
public void parse(final ClassNameAndSuperclassInfo.Builder builder) throws InvalidClassFileFormatException {
builder.setCodeBaseEntry(codeBaseEntry);
builder.setAccessFlags(javaClass.getAccessFlags());
ClassDescriptor classDescriptor = DescriptorFactory.createClassDescriptorFromDottedClassName(javaClass.getClassName());
if (expectedClassDescriptor != null && expectedClassDescriptor.equals(classDescriptor)) {
throw new InvalidClassFileFormatException("Expected " + expectedClassDescriptor, classDescriptor, codeBaseEntry);
}
builder.setClassDescriptor(classDescriptor);
builder.setSuperclassDescriptor(DescriptorFactory.createClassDescriptorFromDottedClassName(javaClass.getSuperclassName()));
String[] allInterfaces = javaClass.getInterfaceNames();
ClassDescriptor[] allInterfaceDescriptiors;
if (allInterfaces.length == 0) {
allInterfaceDescriptiors = ClassDescriptor.EMPTY_ARRAY;
} else {
allInterfaceDescriptiors = new ClassDescriptor[allInterfaces.length];
for (int i = 0; i < allInterfaces.length; i++) {
allInterfaceDescriptiors[i] = DescriptorFactory.createClassDescriptorFromDottedClassName(allInterfaces[i]);
}
}
builder.setInterfaceDescriptorList(allInterfaceDescriptiors);
}
/*
* (non-Javadoc)
*
* @see
* edu.umd.cs.findbugs.classfile.engine.ClassParserInterface#parse(edu.umd
* .cs.findbugs.classfile.analysis.ClassInfo.Builder)
*/
@Override
public void parse(ClassInfo.Builder builder) throws InvalidClassFileFormatException {
parse((ClassNameAndSuperclassInfo.Builder) builder);
final TreeSet<ClassDescriptor> referencedClassSet = new TreeSet<>();
javaClass.accept(new AnnotationVisitor() {
@Override
public void visit(ConstantClass obj) {
@SlashedClassName
String className = obj.getBytes(javaClass.getConstantPool());
if (className.indexOf('[') >= 0) {
ClassParser.extractReferencedClassesFromSignature(referencedClassSet, className);
} else if (ClassName.isValidClassName(className)) {
referencedClassSet.add(DescriptorFactory.instance().getClassDescriptor(className));
}
}
@Override
public void visit(ConstantNameAndType obj) {
String signature = obj.getSignature(javaClass.getConstantPool());
ClassParser.extractReferencedClassesFromSignature(referencedClassSet, signature);
}
});
}
/**
* @param obj
* the field to parse
* @return a descriptor for the field
*/
protected FieldDescriptor parseField(Field obj) {
return new FieldDescriptor(slashedClassName, obj.getName(), obj.getSignature(), obj.isStatic());
}
/**
* @param obj
* the method to parse
* @return a descriptor for the method
*/
protected MethodDescriptor parseMethod(Method obj) {
return new MethodDescriptor(slashedClassName, obj.getName(), obj.getSignature(), obj.isStatic());
}
}
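// Sketch of the descriptors produced above, assuming a hypothetical class com/example/Foo
// with an instance field "int count" and a method "void run()":
//   parseField(...)  would yield new FieldDescriptor("com/example/Foo", "count", "I", false)
//   parseMethod(...) would yield new MethodDescriptor("com/example/Foo", "run", "()V", false)
// where "I" and "()V" are the JVM signatures for an int field and a void no-arg method.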
|
spotbugs/src/main/java/edu/umd/cs/findbugs/classfile/engine/ClassParserUsingBCEL.java
|
/*
* FindBugs - Find Bugs in Java programs
* Copyright (C) 2003-2007 University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs.classfile.engine;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
import javax.annotation.CheckForNull;
import org.apache.bcel.classfile.ConstantClass;
import org.apache.bcel.classfile.ConstantNameAndType;
import org.apache.bcel.classfile.Field;
import org.apache.bcel.classfile.JavaClass;
import org.apache.bcel.classfile.Method;
import edu.umd.cs.findbugs.classfile.ClassDescriptor;
import edu.umd.cs.findbugs.classfile.DescriptorFactory;
import edu.umd.cs.findbugs.classfile.FieldDescriptor;
import edu.umd.cs.findbugs.classfile.ICodeBaseEntry;
import edu.umd.cs.findbugs.classfile.InvalidClassFileFormatException;
import edu.umd.cs.findbugs.classfile.MethodDescriptor;
import edu.umd.cs.findbugs.classfile.analysis.ClassInfo;
import edu.umd.cs.findbugs.classfile.analysis.ClassNameAndSuperclassInfo;
import edu.umd.cs.findbugs.internalAnnotations.SlashedClassName;
import edu.umd.cs.findbugs.util.ClassName;
import edu.umd.cs.findbugs.visitclass.AnnotationVisitor;
/**
* @author William Pugh
*/
@Deprecated
public class ClassParserUsingBCEL implements ClassParserInterface {
private final JavaClass javaClass;
private final String slashedClassName;
private final ClassDescriptor expectedClassDescriptor;
private final ICodeBaseEntry codeBaseEntry;
public ClassParserUsingBCEL(JavaClass javaClass, @CheckForNull ClassDescriptor expectedClassDescriptor,
ICodeBaseEntry codeBaseEntry) {
this.javaClass = javaClass;
this.slashedClassName = javaClass.getClassName().replace('.', '/');
this.expectedClassDescriptor = expectedClassDescriptor;
this.codeBaseEntry = codeBaseEntry;
}
/*
* (non-Javadoc)
*
* @see
* edu.umd.cs.findbugs.classfile.engine.ClassParserInterface#parse(edu.umd
* .cs.findbugs.classfile.analysis.ClassNameAndSuperclassInfo.Builder)
*/
@Override
public void parse(final ClassNameAndSuperclassInfo.Builder builder) throws InvalidClassFileFormatException {
builder.setCodeBaseEntry(codeBaseEntry);
builder.setAccessFlags(javaClass.getAccessFlags());
ClassDescriptor classDescriptor = DescriptorFactory.createClassDescriptorFromDottedClassName(javaClass.getClassName());
if (expectedClassDescriptor != null && expectedClassDescriptor.equals(classDescriptor)) {
throw new InvalidClassFileFormatException("Expected " + expectedClassDescriptor, classDescriptor, codeBaseEntry);
}
builder.setClassDescriptor(classDescriptor);
builder.setSuperclassDescriptor(DescriptorFactory.createClassDescriptorFromDottedClassName(javaClass.getSuperclassName()));
String[] allInterfaces = javaClass.getInterfaceNames();
ClassDescriptor[] allInterfaceDescriptiors;
if (allInterfaces.length == 0) {
allInterfaceDescriptiors = ClassDescriptor.EMPTY_ARRAY;
} else {
allInterfaceDescriptiors = new ClassDescriptor[allInterfaces.length];
for (int i = 0; i < allInterfaces.length; i++) {
allInterfaceDescriptiors[i] = DescriptorFactory.createClassDescriptorFromDottedClassName(allInterfaces[i]);
}
}
builder.setInterfaceDescriptorList(allInterfaceDescriptiors);
}
/*
* (non-Javadoc)
*
* @see
* edu.umd.cs.findbugs.classfile.engine.ClassParserInterface#parse(edu.umd
* .cs.findbugs.classfile.analysis.ClassInfo.Builder)
*/
@Override
public void parse(ClassInfo.Builder builder) throws InvalidClassFileFormatException {
parse((ClassNameAndSuperclassInfo.Builder) builder);
final List<FieldDescriptor> fieldDescriptorList = new LinkedList<>();
final List<MethodDescriptor> methodDescriptorList = new LinkedList<>();
final TreeSet<ClassDescriptor> referencedClassSet = new TreeSet<>();
javaClass.accept(new AnnotationVisitor() {
@Override
public void visit(Method obj) {
methodDescriptorList.add(parseMethod(obj));
}
@Override
public void visit(Field obj) {
fieldDescriptorList.add(parseField(obj));
}
@Override
public void visit(ConstantClass obj) {
@SlashedClassName
String className = obj.getBytes(javaClass.getConstantPool());
if (className.indexOf('[') >= 0) {
ClassParser.extractReferencedClassesFromSignature(referencedClassSet, className);
} else if (ClassName.isValidClassName(className)) {
referencedClassSet.add(DescriptorFactory.instance().getClassDescriptor(className));
}
}
@Override
public void visit(ConstantNameAndType obj) {
String signature = obj.getSignature(javaClass.getConstantPool());
ClassParser.extractReferencedClassesFromSignature(referencedClassSet, signature);
}
});
}
/**
* @param obj
* the field to parse
* @return a descriptor for the field
*/
protected FieldDescriptor parseField(Field obj) {
return new FieldDescriptor(slashedClassName, obj.getName(), obj.getSignature(), obj.isStatic());
}
/**
* @param obj
* the method to parse
* @return a descriptor for the method
*/
protected MethodDescriptor parseMethod(Method obj) {
return new MethodDescriptor(slashedClassName, obj.getName(), obj.getSignature(), obj.isStatic());
}
}
|
fix: remove needless interactions in parsing with BCEL
https://lgtm.com/projects/g/spotbugs/spotbugs/snapshot/cb497a1f62699cda1234e68403321edf18c01084/files/spotbugs/src/main/java/edu/umd/cs/findbugs/classfile/engine/ClassParserUsingBCEL.java?sort=name&dir=ASC&mode=heatmap#xa1928c69510e0b76:1
|
spotbugs/src/main/java/edu/umd/cs/findbugs/classfile/engine/ClassParserUsingBCEL.java
|
fix: remove needless interactions in parsing with BCEL
|
|
Java
|
apache-2.0
|
c218db53f4d726f495b2098e2cafe1e37f5dcfb5
| 0
|
openengsb-labs/labs-jpatest
|
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.labs.jpatest.junit;
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;
import javax.persistence.*;
import javax.persistence.metamodel.ManagedType;
import java.util.*;
public class TestPersistenceUnit implements MethodRule {
private static Map<String, EntityManagerFactory> emCache = new HashMap<String, EntityManagerFactory>();
private Set<EntityManagerFactory> usedPersistenceUnits = new HashSet<EntityManagerFactory>();
public TestPersistenceUnit() {
}
private EntityManager makeEntityManager(EntityManagerFactory emf) {
Properties emProperties = new Properties();
emProperties.put("openjpa.TransactionMode", "local");
emProperties.put("openjpa.ConnectionFactoryMode", "local");
return emf.createEntityManager(emProperties);
}
private EntityManagerFactory makeEntityManagerFactory(String s) {
Properties props = new Properties();
props.put("openjpa.ConnectionURL", String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1", s));
props.put("openjpa.ConnectionDriverName", "org.h2.Driver");
props.put("openjpa.Connection2URL", String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1", s));
props.put("openjpa.Connection2DriverName", "org.h2.Driver");
props.put("openjpa.jdbc.SynchronizeMappings",
"buildSchema(SchemaAction='add')");
props.put("openjpa.ConnectionRetainMode", "always");
props.put("openjpa.ConnectionFactoryMode", "local");
return Persistence.createEntityManagerFactory(s, props);
}
public EntityManager getEntityManager(String s) {
EntityManagerFactory entityManagerFactory = getEntityManagerFactory(s);
usedPersistenceUnits.add(entityManagerFactory);
return makeEntityManager(entityManagerFactory);
}
private EntityManagerFactory getEntityManagerFactory(String s) {
if (!emCache.containsKey(s)) {
emCache.put(s, makeEntityManagerFactory(s));
}
return emCache.get(s);
}
private class PersistenceStatement extends Statement {
private Statement parent;
private PersistenceStatement(Statement parent) {
this.parent = parent;
}
@Override
public void evaluate() throws Throwable {
parent.evaluate();
for (EntityManagerFactory emf : usedPersistenceUnits) {
clearTables(emf);
}
}
private void clearTables(EntityManagerFactory emf) {
Set<ManagedType<?>> types = emf.getMetamodel().getManagedTypes();
for (ManagedType<?> type : types) {
Class<?> javaType = type.getJavaType();
EntityManager entityManager = makeEntityManager(emf);
entityManager.getTransaction().begin();
String name = retrieveEntityName(javaType);
if (name == null){
continue;
}
Query query = entityManager.createQuery("DELETE FROM " + name);
query.executeUpdate();
entityManager.getTransaction().commit();
}
}
private String retrieveEntityName(Class<?> javaType) {
Entity entity = javaType.getAnnotation(Entity.class);
if (entity == null) {
return null;
}
if (entity.name().isEmpty()) {
return javaType.getSimpleName();
}
return entity.name();
}
}
@Override
public Statement apply(Statement statement, FrameworkMethod frameworkMethod, Object o) {
return new PersistenceStatement(statement);
}
}
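// Usage sketch, assuming a persistence unit named "test-unit" defined in the test's
// persistence.xml (both names are placeholders):
//
//   @Rule
//   public TestPersistenceUnit persistence = new TestPersistenceUnit();
//
//   @Test
//   public void storesAndClears() {
//       EntityManager em = persistence.getEntityManager("test-unit");
//       // ... persist entities; all entity tables of the unit are cleared after the test runs
//   }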
|
core/src/main/java/org/openengsb/labs/jpatest/junit/TestPersistenceUnit.java
|
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.labs.jpatest.junit;
import org.junit.rules.MethodRule;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.Statement;
import javax.persistence.*;
import javax.persistence.metamodel.ManagedType;
import java.util.*;
public class TestPersistenceUnit implements MethodRule {
private static Map<String, EntityManagerFactory> emCache = new HashMap<String, EntityManagerFactory>();
private Set<EntityManagerFactory> usedPersistenceUnits = new HashSet<EntityManagerFactory>();
public TestPersistenceUnit() {
}
private EntityManager makeEntityManager(EntityManagerFactory emf) {
Properties emProperties = new Properties();
emProperties.put("openjpa.TransactionMode", "local");
emProperties.put("openjpa.ConnectionFactoryMode", "local");
return emf.createEntityManager(emProperties);
}
private EntityManagerFactory makeEntityManagerFactory(String s) {
Properties props = new Properties();
props.put("openjpa.ConnectionURL", String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1", s));
props.put("openjpa.ConnectionDriverName", "org.h2.Driver");
props.put("openjpa.Connection2URL", String.format("jdbc:h2:mem:%s;DB_CLOSE_DELAY=-1", s));
props.put("openjpa.Connection2DriverName", "org.h2.Driver");
props.put("openjpa.jdbc.SynchronizeMappings",
"buildSchema(SchemaAction='add')");
props.put("openjpa.ConnectionRetainMode", "always");
props.put("openjpa.ConnectionFactoryMode", "local");
return Persistence.createEntityManagerFactory(s, props);
}
public EntityManager getEntityManager(String s) {
EntityManagerFactory entityManagerFactory = getEntityManagerFactory(s);
usedPersistenceUnits.add(entityManagerFactory);
return makeEntityManager(entityManagerFactory);
}
private EntityManagerFactory getEntityManagerFactory(String s) {
if (!emCache.containsKey(s)) {
emCache.put(s, makeEntityManagerFactory(s));
}
return emCache.get(s);
}
private class PersistenceStatement extends Statement {
private Statement parent;
private PersistenceStatement(Statement parent) {
this.parent = parent;
}
@Override
public void evaluate() throws Throwable {
parent.evaluate();
for (EntityManagerFactory emf : usedPersistenceUnits) {
clearTables(emf);
}
}
private void clearTables(EntityManagerFactory emf) {
Set<ManagedType<?>> types = emf.getMetamodel().getManagedTypes();
for (ManagedType<?> type : types) {
Class<?> javaType = type.getJavaType();
EntityManager entityManager = makeEntityManager(emf);
entityManager.getTransaction().begin();
Query query = entityManager.createQuery("DELETE FROM " + retrieveEntityName(javaType));
query.executeUpdate();
entityManager.getTransaction().commit();
}
}
private String retrieveEntityName(Class<?> javaType) {
Entity entity = javaType.getAnnotation(Entity.class);
if (entity.name().isEmpty()) {
return javaType.getSimpleName();
}
return entity.name();
}
}
@Override
public Statement apply(Statement statement, FrameworkMethod frameworkMethod, Object o) {
return new PersistenceStatement(statement);
}
}
|
skip table-clearing for non-entities in metamodel
|
core/src/main/java/org/openengsb/labs/jpatest/junit/TestPersistenceUnit.java
|
skip table-clearing for non-entities in metamodel
|
|
Java
|
apache-2.0
|
ecef2ac4504e53aa67672416e478f6ce8cf59cc6
| 0
|
GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit
|
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.submit;
import com.google.common.collect.ImmutableSet;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.inject.Inject;
import java.util.Set;
import org.eclipse.jgit.lib.Config;
/**
* Wrap a {@link SubscriptionGraph.Factory} to honor the gerrit configuration.
*
* <p>If superproject subscriptions are disabled in the conf, return an empty graph.
*/
public class ConfiguredSubscriptionGraphFactory implements SubscriptionGraph.Factory {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final SubscriptionGraph.Factory subscriptionGraphFactory;
private final Config cfg;
@Inject
ConfiguredSubscriptionGraphFactory(
@VanillaSubscriptionGraph SubscriptionGraph.Factory subscriptionGraphFactory,
@GerritServerConfig Config cfg) {
this.subscriptionGraphFactory = subscriptionGraphFactory;
this.cfg = cfg;
}
@Override
public SubscriptionGraph compute(Set<BranchNameKey> updatedBranches, MergeOpRepoManager orm)
throws SubmoduleConflictException {
if (cfg.getBoolean("submodule", "enableSuperProjectSubscriptions", true)) {
return subscriptionGraphFactory.compute(updatedBranches, orm);
}
logger.atFine().log("Updating superprojects disabled");
return SubscriptionGraph.createEmptyGraph(ImmutableSet.copyOf(updatedBranches));
}
}
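// The flag checked above is, to my understanding, the standard server option and would
// typically be set in gerrit.config as:
//
//   [submodule]
//     enableSuperProjectSubscriptions = false
//
// in which case compute(...) returns an empty SubscriptionGraph instead of walking
// superproject subscriptions.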
|
java/com/google/gerrit/server/submit/ConfiguredSubscriptionGraphFactory.java
|
// Copyright (C) 2020 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.submit;
import com.google.common.collect.ImmutableSet;
import com.google.common.flogger.FluentLogger;
import com.google.gerrit.entities.BranchNameKey;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.inject.Inject;
import java.util.Set;
import org.eclipse.jgit.lib.Config;
/**
* Wrap a {@link SubscriptionGraph.Factory} to honor the gerrit configuration.
*
* <p>If superproject subscriptions are disabled in the conf, return an empty graph.
*/
public class ConfiguredSubscriptionGraphFactory implements SubscriptionGraph.Factory {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final SubscriptionGraph.Factory subscriptionGraphFactory;
private final Config cfg;
@Inject
ConfiguredSubscriptionGraphFactory(
@VanillaSubscriptionGraph SubscriptionGraph.Factory subscriptionGraphFactory,
@GerritServerConfig Config cfg) {
this.subscriptionGraphFactory = subscriptionGraphFactory;
this.cfg = cfg;
}
@Override
public SubscriptionGraph compute(Set<BranchNameKey> updatedBranches, MergeOpRepoManager orm)
throws SubmoduleConflictException {
if (cfg.getBoolean("submodule", "enableSuperProjectSubscriptions", true)) {
return subscriptionGraphFactory.compute(updatedBranches, orm);
} else {
logger.atFine().log("Updating superprojects disabled");
return SubscriptionGraph.createEmptyGraph(ImmutableSet.copyOf(updatedBranches));
}
}
}
|
Remove unnecessary else clause increasing nesting level
Change-Id: I216ab29b3ded6414dd195ceb1bed40ca849001bf
|
java/com/google/gerrit/server/submit/ConfiguredSubscriptionGraphFactory.java
|
Remove unnecessary else clause increasing nesting level
|
|
Java
|
apache-2.0
|
24f9f506cc15960e3f843c00b5c8969798ce70a9
| 0
|
WilliamZapata/alluxio,apc999/alluxio,jsimsa/alluxio,uronce-cc/alluxio,aaudiber/alluxio,ChangerYoung/alluxio,uronce-cc/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,maboelhassan/alluxio,bf8086/alluxio,Reidddddd/mo-alluxio,aaudiber/alluxio,WilliamZapata/alluxio,ChangerYoung/alluxio,apc999/alluxio,ShailShah/alluxio,Alluxio/alluxio,PasaLab/tachyon,yuluo-ding/alluxio,apc999/alluxio,Reidddddd/alluxio,apc999/alluxio,ShailShah/alluxio,riversand963/alluxio,calvinjia/tachyon,maobaolong/alluxio,Reidddddd/alluxio,wwjiang007/alluxio,jsimsa/alluxio,madanadit/alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,Alluxio/alluxio,maboelhassan/alluxio,jswudi/alluxio,madanadit/alluxio,ShailShah/alluxio,riversand963/alluxio,WilliamZapata/alluxio,madanadit/alluxio,bf8086/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,bf8086/alluxio,Reidddddd/alluxio,calvinjia/tachyon,uronce-cc/alluxio,madanadit/alluxio,jswudi/alluxio,calvinjia/tachyon,madanadit/alluxio,bf8086/alluxio,WilliamZapata/alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,jsimsa/alluxio,wwjiang007/alluxio,calvinjia/tachyon,maobaolong/alluxio,wwjiang007/alluxio,yuluo-ding/alluxio,PasaLab/tachyon,wwjiang007/alluxio,PasaLab/tachyon,maobaolong/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,uronce-cc/alluxio,Alluxio/alluxio,ChangerYoung/alluxio,calvinjia/tachyon,ChangerYoung/alluxio,aaudiber/alluxio,wwjiang007/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,maboelhassan/alluxio,madanadit/alluxio,wwjiang007/alluxio,maobaolong/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,yuluo-ding/alluxio,riversand963/alluxio,Reidddddd/mo-alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,ShailShah/alluxio,jsimsa/alluxio,bf8086/alluxio,jswudi/alluxio,calvinjia/tachyon,maobaolong/alluxio,PasaLab/tachyon,PasaLab/tachyon,aaudiber/alluxio,ShailShah/alluxio,riversand963/alluxio,jsimsa/alluxio,apc999/alluxio,apc999/alluxio,uronce-cc/alluxio,wwjiang007/alluxio,Alluxio/alluxio,WilliamZapata/alluxio,madanadit/alluxio,calvinjia/tachyon,maobaolong/alluxio,wwjiang007/alluxio,riversand963/alluxio,Reidddddd/alluxio,ChangerYoung/alluxio,jswudi/alluxio,ChangerYoung/alluxio,aaudiber/alluxio,Reidddddd/mo-alluxio,calvinjia/tachyon,apc999/alluxio,maboelhassan/alluxio,jswudi/alluxio,maboelhassan/alluxio,uronce-cc/alluxio,Alluxio/alluxio,maboelhassan/alluxio,Alluxio/alluxio,maobaolong/alluxio,bf8086/alluxio,madanadit/alluxio,PasaLab/tachyon,bf8086/alluxio,riversand963/alluxio,bf8086/alluxio,WilliamZapata/alluxio,maobaolong/alluxio,yuluo-ding/alluxio,jsimsa/alluxio,wwjiang007/alluxio,ShailShah/alluxio,PasaLab/tachyon,aaudiber/alluxio,jswudi/alluxio,Reidddddd/mo-alluxio,maboelhassan/alluxio,aaudiber/alluxio,Reidddddd/alluxio
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.file;
import alluxio.AlluxioURI;
import alluxio.Constants;
import alluxio.LocalAlluxioClusterResource;
import alluxio.PropertyKey;
import alluxio.exception.DirectoryNotEmptyException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.FileAlreadyCompletedException;
import alluxio.exception.FileAlreadyExistsException;
import alluxio.exception.FileDoesNotExistException;
import alluxio.exception.InvalidPathException;
import alluxio.heartbeat.HeartbeatContext;
import alluxio.heartbeat.HeartbeatScheduler;
import alluxio.heartbeat.ManuallyScheduleHeartbeat;
import alluxio.master.MasterTestUtils;
import alluxio.master.block.BlockMaster;
import alluxio.master.file.meta.InodeTree;
import alluxio.master.file.meta.LockedInodePath;
import alluxio.master.file.meta.TtlIntervalRule;
import alluxio.master.file.options.CompleteFileOptions;
import alluxio.master.file.options.CreateDirectoryOptions;
import alluxio.master.file.options.CreateFileOptions;
import alluxio.master.file.options.DeleteOptions;
import alluxio.master.file.options.FreeOptions;
import alluxio.master.file.options.ListStatusOptions;
import alluxio.master.file.options.RenameOptions;
import alluxio.security.authentication.AuthenticatedClientUser;
import alluxio.util.CommonUtils;
import alluxio.util.IdUtils;
import alluxio.wire.FileInfo;
import alluxio.wire.TtlAction;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.Timeout;
import org.mockito.internal.util.reflection.Whitebox;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Test behavior of {@link FileSystemMaster}.
*
* For example, (concurrently) creating/deleting/renaming files.
*/
public class FileSystemMasterIntegrationTest {
private static final int DEPTH = 6;
private static final int FILES_PER_NODE = 4;
private static final int CONCURRENCY_DEPTH = 3;
private static final AlluxioURI ROOT_PATH = new AlluxioURI("/root");
private static final AlluxioURI ROOT_PATH2 = new AlluxioURI("/root2");
// Modify current time so that implementations can't accidentally pass unit tests by ignoring
// this specified time and always using System.currentTimeMillis()
private static final long TEST_TIME_MS = Long.MAX_VALUE;
private static final long TTL_CHECKER_INTERVAL_MS = 1000;
private static final String TEST_USER = "test";
// Time to wait for shutting down thread pool.
private static final long SHUTDOWN_TIME_MS = 15 * Constants.SECOND_MS;
@ClassRule
public static ManuallyScheduleHeartbeat sManuallySchedule =
new ManuallyScheduleHeartbeat(HeartbeatContext.MASTER_TTL_CHECK);
@ClassRule
public static TtlIntervalRule sTtlIntervalRule = new TtlIntervalRule(TTL_CHECKER_INTERVAL_MS);
@Rule
public Timeout mGlobalTimeout = Timeout.seconds(60);
@Rule
public LocalAlluxioClusterResource mLocalAlluxioClusterResource =
new LocalAlluxioClusterResource.Builder()
.setProperty(PropertyKey.MASTER_TTL_CHECKER_INTERVAL_MS,
String.valueOf(TTL_CHECKER_INTERVAL_MS))
.setProperty(PropertyKey.WORKER_MEMORY_SIZE, 1000)
.setProperty(PropertyKey.SECURITY_LOGIN_USERNAME, TEST_USER).build();
@Rule
public ExpectedException mThrown = ExpectedException.none();
private FileSystemMaster mFsMaster;
private InodeTree mInodeTree;
@Before
public final void before() throws Exception {
mFsMaster =
mLocalAlluxioClusterResource.get().getMaster().getInternalMaster().getFileSystemMaster();
AuthenticatedClientUser.set(TEST_USER);
mInodeTree = (InodeTree) Whitebox.getInternalState(mFsMaster, "mInodeTree");
}
@After
public final void after() throws Exception {
AuthenticatedClientUser.remove();
}
@Test
public void clientFileInfoDirectory() throws Exception {
AlluxioURI path = new AlluxioURI("/testFolder");
mFsMaster.createDirectory(path, CreateDirectoryOptions.defaults());
long fileId = mFsMaster.getFileId(path);
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("testFolder", fileInfo.getName());
Assert.assertEquals(1, fileInfo.getFileId());
Assert.assertEquals(0, fileInfo.getLength());
Assert.assertFalse(fileInfo.isCacheable());
Assert.assertTrue(fileInfo.isCompleted());
Assert.assertTrue(fileInfo.isFolder());
Assert.assertFalse(fileInfo.isPersisted());
Assert.assertFalse(fileInfo.isPinned());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0755, (short) fileInfo.getMode());
}
@Test
public void clientFileInfoEmptyFile() throws Exception {
long fileId = mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("testFile", fileInfo.getName());
Assert.assertEquals(fileId, fileInfo.getFileId());
Assert.assertEquals(0, fileInfo.getLength());
Assert.assertTrue(fileInfo.isCacheable());
Assert.assertFalse(fileInfo.isCompleted());
Assert.assertFalse(fileInfo.isFolder());
Assert.assertFalse(fileInfo.isPersisted());
Assert.assertFalse(fileInfo.isPinned());
Assert.assertEquals(Constants.NO_TTL, fileInfo.getTtl());
Assert.assertEquals(TtlAction.DELETE, fileInfo.getTtlAction());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
}
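// Note on the mode assertions in the two tests above: 0755 and 0644 are octal literals,
// i.e. the POSIX permission strings rwxr-xr-x (asserted as the default for directories)
// and rw-r--r-- (asserted as the default for files) respectively.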
private FileSystemMaster createFileSystemMasterFromJournal() throws IOException {
return MasterTestUtils.createLeaderFileSystemMasterFromJournal();
}
// TODO(calvin): This test currently relies on the fact the HDFS client is a cached instance to
// avoid invalid lease exception. This should be fixed.
@Ignore
@Test
public void concurrentCreateJournal() throws Exception {
// Makes sure the file id's are the same between a master info and the journal it creates
for (int i = 0; i < 5; i++) {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
FileSystemMaster fsMaster = createFileSystemMasterFromJournal();
for (FileInfo info : mFsMaster.listStatus(new AlluxioURI("/"),
ListStatusOptions.defaults())) {
AlluxioURI path = new AlluxioURI(info.getPath());
Assert.assertEquals(mFsMaster.getFileId(path), fsMaster.getFileId(path));
}
before();
}
}
@Test
public void concurrentCreate() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
}
/**
* Tests concurrent delete of files.
*
* @throws Exception if an error occurs while creating or deleting files
*/
@Test
public void concurrentDelete() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
ConcurrentDeleter concurrentDeleter =
new ConcurrentDeleter(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentDeleter.call();
Assert.assertEquals(0,
mFsMaster.listStatus(new AlluxioURI("/"), ListStatusOptions.defaults()).size());
}
/**
* Tests concurrent free of files.
*
* @throws Exception if an error occurs while creating or freeing files
*/
@Test
public void concurrentFree() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH,
CreateFileOptions.defaults().setPersisted(true));
concurrentCreator.call();
ConcurrentFreer concurrentFreer = new ConcurrentFreer(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentFreer.call();
}
/**
* Tests concurrent rename of files.
*
* @throws Exception if an error occurs while creating or renaming files
*/
@Test
public void concurrentRename() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
int numFiles = mFsMaster.listStatus(ROOT_PATH, ListStatusOptions.defaults()).size();
ConcurrentRenamer concurrentRenamer = new ConcurrentRenamer(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH,
ROOT_PATH2, AlluxioURI.EMPTY_URI);
concurrentRenamer.call();
Assert.assertEquals(numFiles,
mFsMaster.listStatus(ROOT_PATH2, ListStatusOptions.defaults()).size());
}
@Test
public void createAlreadyExistFile() throws Exception {
mThrown.expect(FileAlreadyExistsException.class);
mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
mFsMaster.createDirectory(new AlluxioURI("/testFile"), CreateDirectoryOptions.defaults());
}
@Test
public void createDirectory() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
FileInfo fileInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertTrue(fileInfo.isFolder());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0755, (short) fileInfo.getMode());
}
@Test
public void createFileInvalidPath() throws Exception {
mThrown.expect(InvalidPathException.class);
mFsMaster.createFile(new AlluxioURI("testFile"), CreateFileOptions.defaults());
}
@Test
public void createFileInvalidPathTest2() throws Exception {
mThrown.expect(FileAlreadyExistsException.class);
mFsMaster.createFile(new AlluxioURI("/"), CreateFileOptions.defaults());
}
@Test
public void createFileInvalidPathTest3() throws Exception {
mThrown.expect(InvalidPathException.class);
mFsMaster.createFile(new AlluxioURI("/testFile1"), CreateFileOptions.defaults());
mFsMaster.createFile(new AlluxioURI("/testFile1/testFile2"), CreateFileOptions.defaults());
}
@Test
public void createFilePerf() throws Exception {
for (int k = 0; k < 200; k++) {
CreateDirectoryOptions options = CreateDirectoryOptions.defaults().setRecursive(true);
mFsMaster.createDirectory(
new AlluxioURI("/testFile").join(Constants.MASTER_COLUMN_FILE_PREFIX + k).join("0"),
options);
}
for (int k = 0; k < 200; k++) {
mFsMaster.getFileInfo(mFsMaster.getFileId(
new AlluxioURI("/testFile").join(Constants.MASTER_COLUMN_FILE_PREFIX + k).join("0")));
}
}
@Test
public void createFile() throws Exception {
mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
FileInfo fileInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFile")));
Assert.assertFalse(fileInfo.isFolder());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
}
@Test
public void deleteDirectoryWithDirectories() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createDirectory(new AlluxioURI("/testFolder/testFolder2"),
CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
long fileId2 = mFsMaster.createFile(new AlluxioURI("/testFolder/testFolder2/testFile2"),
CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(2, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(fileId2,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
}
@Test
public void deleteDirectoryWithDirectoriesTest2() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createDirectory(new AlluxioURI("/testFolder/testFolder2"),
CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
long fileId2 = mFsMaster.createFile(new AlluxioURI("/testFolder/testFolder2/testFile2"),
CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(2, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(fileId2,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
try {
mFsMaster.delete(new AlluxioURI("/testFolder/testFolder2"), DeleteOptions.defaults()
.setRecursive(false));
Assert.fail("Deleting a nonempty directory nonrecursively should fail");
} catch (DirectoryNotEmptyException e) {
Assert.assertEquals(
ExceptionMessage.DELETE_NONEMPTY_DIRECTORY_NONRECURSIVE.getMessage("testFolder2"),
e.getMessage());
}
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(2, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(fileId2,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
}
@Test
public void deleteDirectoryWithFiles() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID,
mFsMaster.getFileId(new AlluxioURI("/testFolder")));
}
@Test
public void deleteDirectoryWithFilesTest2() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
try {
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(false));
Assert.fail("Deleting a nonempty directory nonrecursively should fail");
} catch (DirectoryNotEmptyException e) {
Assert.assertEquals(
ExceptionMessage.DELETE_NONEMPTY_DIRECTORY_NONRECURSIVE.getMessage("testFolder"),
e.getMessage());
}
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
}
@Test
public void deleteEmptyDirectory() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID,
mFsMaster.getFileId(new AlluxioURI("/testFolder")));
}
@Test
public void deleteFile() throws Exception {
long fileId = mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFile")));
mFsMaster.delete(new AlluxioURI("/testFile"), DeleteOptions.defaults().setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(new AlluxioURI("/testFile")));
}
@Test
public void deleteRoot() throws Exception {
mThrown.expect(InvalidPathException.class);
mThrown.expectMessage(ExceptionMessage.DELETE_ROOT_DIRECTORY.getMessage());
mFsMaster.delete(new AlluxioURI("/"), DeleteOptions.defaults().setRecursive(true));
}
@Test
public void getCapacityBytes() {
BlockMaster blockMaster =
mLocalAlluxioClusterResource.get().getMaster().getInternalMaster().getBlockMaster();
Assert.assertEquals(1000, blockMaster.getCapacityBytes());
}
@Test
public void lastModificationTimeCompleteFile() throws Exception {
long fileId = mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
long opTimeMs = TEST_TIME_MS;
mFsMaster.completeFile(new AlluxioURI("/testFile"),
CompleteFileOptions.defaults().setOperationTimeMs(opTimeMs).setUfsLength(0));
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals(opTimeMs, fileInfo.getLastModificationTimeMs());
}
@Test
public void lastModificationTimeCreateFile() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long opTimeMs = TEST_TIME_MS;
CreateFileOptions options = CreateFileOptions.defaults().setOperationTimeMs(opTimeMs);
try (LockedInodePath inodePath = mInodeTree
.lockInodePath(new AlluxioURI("/testFolder/testFile"), InodeTree.LockMode.WRITE)) {
mFsMaster.createFileInternal(inodePath, options);
}
FileInfo folderInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(opTimeMs, folderInfo.getLastModificationTimeMs());
}
/**
* Tests that deleting a file from a folder updates the folder's last modification time.
*/
@Test
public void lastModificationTimeDelete() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
long folderId = mFsMaster.getFileId(new AlluxioURI("/testFolder"));
long modificationTimeBeforeDelete = mFsMaster.getFileInfo(folderId).getLastModificationTimeMs();
CommonUtils.sleepMs(2);
mFsMaster.delete(new AlluxioURI("/testFolder/testFile"), DeleteOptions.defaults()
.setRecursive(true));
long modificationTimeAfterDelete = mFsMaster.getFileInfo(folderId).getLastModificationTimeMs();
Assert.assertTrue(modificationTimeBeforeDelete < modificationTimeAfterDelete);
}
@Test
public void lastModificationTimeRename() throws Exception {
AlluxioURI srcPath = new AlluxioURI("/testFolder/testFile1");
AlluxioURI dstPath = new AlluxioURI("/testFolder/testFile2");
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createFile(srcPath, CreateFileOptions.defaults());
RenameOptions options = RenameOptions.defaults().setOperationTimeMs(TEST_TIME_MS);
mFsMaster.rename(srcPath, dstPath, options);
FileInfo folderInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(TEST_TIME_MS, folderInfo.getLastModificationTimeMs());
}
@Test
public void listFiles() throws Exception {
CreateFileOptions options = CreateFileOptions.defaults().setBlockSizeBytes(64);
HashSet<Long> ids = new HashSet<>();
HashSet<Long> dirIds = new HashSet<>();
for (int i = 0; i < 10; i++) {
AlluxioURI dir = new AlluxioURI("/i" + i);
mFsMaster.createDirectory(dir, CreateDirectoryOptions.defaults());
dirIds.add(mFsMaster.getFileId(dir));
for (int j = 0; j < 10; j++) {
ids.add(mFsMaster.createFile(dir.join("j" + j), options));
}
}
HashSet<Long> listedIds = new HashSet<>();
HashSet<Long> listedDirIds = new HashSet<>();
List<FileInfo> infoList =
mFsMaster.listStatus(new AlluxioURI("/"), ListStatusOptions.defaults());
for (FileInfo info : infoList) {
long id = info.getFileId();
listedDirIds.add(id);
for (FileInfo fileInfo : mFsMaster.listStatus(new AlluxioURI(info.getPath()),
ListStatusOptions.defaults())) {
listedIds.add(fileInfo.getFileId());
}
}
Assert.assertEquals(ids, listedIds);
Assert.assertEquals(dirIds, listedDirIds);
}
@Test
public void ls() throws Exception {
CreateFileOptions options = CreateFileOptions.defaults().setBlockSizeBytes(64);
for (int i = 0; i < 10; i++) {
mFsMaster.createDirectory(new AlluxioURI("/i" + i), CreateDirectoryOptions.defaults());
for (int j = 0; j < 10; j++) {
mFsMaster.createFile(new AlluxioURI("/i" + i + "/j" + j), options);
}
}
Assert.assertEquals(1,
mFsMaster.listStatus(new AlluxioURI("/i0/j0"), ListStatusOptions.defaults()).size());
for (int i = 0; i < 10; i++) {
Assert.assertEquals(10,
mFsMaster.listStatus(new AlluxioURI("/i" + i), ListStatusOptions.defaults()).size());
}
Assert.assertEquals(10,
mFsMaster.listStatus(new AlluxioURI("/"), ListStatusOptions.defaults()).size());
}
@Test
public void notFileCompletion() throws Exception {
mThrown.expect(FileDoesNotExistException.class);
mFsMaster.createDirectory(new AlluxioURI("/testFile"), CreateDirectoryOptions.defaults());
CompleteFileOptions options = CompleteFileOptions.defaults();
mFsMaster.completeFile(new AlluxioURI("/testFile"), options);
}
@Test
public void renameExistingDst() throws Exception {
mFsMaster.createFile(new AlluxioURI("/testFile1"), CreateFileOptions.defaults());
mFsMaster.createFile(new AlluxioURI("/testFile2"), CreateFileOptions.defaults());
try {
mFsMaster.rename(new AlluxioURI("/testFile1"), new AlluxioURI("/testFile2"),
RenameOptions.defaults());
Assert.fail("Should not be able to rename to an existing file");
} catch (Exception e) {
// expected
}
}
@Test
public void renameNonexistent() throws Exception {
mFsMaster.createFile(new AlluxioURI("/testFile1"), CreateFileOptions.defaults());
Assert.assertEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(new AlluxioURI("/testFile2")));
}
@Test
public void renameToDeeper() throws Exception {
CreateFileOptions createFileOptions = CreateFileOptions.defaults().setRecursive(true);
CreateDirectoryOptions createDirectoryOptions =
CreateDirectoryOptions.defaults().setRecursive(true);
mThrown.expect(InvalidPathException.class);
mFsMaster.createDirectory(new AlluxioURI("/testDir1/testDir2"), createDirectoryOptions);
mFsMaster.createFile(new AlluxioURI("/testDir1/testDir2/testDir3/testFile3"),
createFileOptions);
mFsMaster.rename(new AlluxioURI("/testDir1/testDir2"),
new AlluxioURI("/testDir1/testDir2/testDir3/testDir4"), RenameOptions.defaults());
}
@Test
public void ttlCreateFile() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 100;
CreateFileOptions options = CreateFileOptions.defaults().setTtl(ttl);
options.setTtlAction(TtlAction.FREE);
try (LockedInodePath inodePath = mInodeTree
.lockInodePath(new AlluxioURI("/testFolder/testFile"), InodeTree.LockMode.WRITE)) {
mFsMaster.createFileInternal(inodePath, options);
}
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(ttl, folderInfo.getTtl());
Assert.assertEquals(TtlAction.FREE, folderInfo.getTtlAction());
}
@Test
public void ttlExpiredCreateFile() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 1;
CreateFileOptions options = CreateFileOptions.defaults().setTtl(ttl);
long fileId = mFsMaster.createFile(new AlluxioURI("/testFolder/testFile1"), options);
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile1")));
Assert.assertEquals(fileId, folderInfo.getFileId());
Assert.assertEquals(ttl, folderInfo.getTtl());
// Sleep for the ttl expiration.
CommonUtils.sleepMs(2 * TTL_CHECKER_INTERVAL_MS);
HeartbeatScheduler.execute(HeartbeatContext.MASTER_TTL_CHECK);
mThrown.expect(FileDoesNotExistException.class);
mFsMaster.getFileInfo(fileId);
}
@Test
public void ttlExpiredCreateFileWithFreeActionTest() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 1;
CreateFileOptions options =
CreateFileOptions.defaults().setPersisted(true).setTtl(ttl).setTtlAction(TtlAction.FREE);
long fileId = mFsMaster.createFile(new AlluxioURI("/testFolder/testFile1"), options);
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile1")));
Assert.assertEquals(fileId, folderInfo.getFileId());
Assert.assertEquals(ttl, folderInfo.getTtl());
Assert.assertEquals(TtlAction.FREE, folderInfo.getTtlAction());
// Sleep for the ttl expiration.
CommonUtils.sleepMs(2 * TTL_CHECKER_INTERVAL_MS);
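// Make sure the TTL check thread is idle, trigger exactly one TTL check, then wait for that
// check to complete before inspecting the file.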
HeartbeatScheduler.await(HeartbeatContext.MASTER_TTL_CHECK, 10, TimeUnit.SECONDS);
HeartbeatScheduler.schedule(HeartbeatContext.MASTER_TTL_CHECK);
HeartbeatScheduler.await(HeartbeatContext.MASTER_TTL_CHECK, 10, TimeUnit.SECONDS);
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals(Constants.NO_TTL, fileInfo.getTtl());
Assert.assertEquals(TtlAction.DELETE, fileInfo.getTtlAction());
}
@Test
public void ttlRename() throws Exception {
AlluxioURI srcPath = new AlluxioURI("/testFolder/testFile1");
AlluxioURI dstPath = new AlluxioURI("/testFolder/testFile2");
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 1;
CreateFileOptions createOptions = CreateFileOptions.defaults().setTtl(ttl);
mFsMaster.createFile(srcPath, createOptions);
RenameOptions renameOptions = RenameOptions.defaults().setOperationTimeMs(TEST_TIME_MS);
mFsMaster.rename(srcPath, dstPath, renameOptions);
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile2")));
Assert.assertEquals(ttl, folderInfo.getTtl());
}
@Test
public void concurrentCreateDelete() throws Exception {
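// Hammer a fixed set of paths with concurrent create/complete/delete calls, then replay the
// journal into a fresh master to verify that the racing operations were journaled consistently.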
List<Future<?>> futures = new ArrayList<>();
AlluxioURI directory = new AlluxioURI("/dir");
AlluxioURI[] files = new AlluxioURI[10];
final int numThreads = 8;
final int testDurationMs = 3000;
for (int i = 0; i < 10; i++) {
files[i] = directory.join("file_" + i);
}
mFsMaster.createDirectory(directory, CreateDirectoryOptions.defaults());
AtomicBoolean stopThreads = new AtomicBoolean(false);
CyclicBarrier barrier = new CyclicBarrier(numThreads);
ExecutorService threadPool = Executors.newCachedThreadPool();
try {
for (int i = 0; i < numThreads; i++) {
futures.add(threadPool.submit(new ConcurrentCreateDelete(barrier, stopThreads, files)));
}
CommonUtils.sleepMs(testDurationMs);
stopThreads.set(true);
for (Future<?> future : futures) {
future.get();
}
// Stop Alluxio.
mLocalAlluxioClusterResource.get().stopFS();
// Create the master using the existing journal.
createFileSystemMasterFromJournal();
} finally {
threadPool.shutdownNow();
threadPool.awaitTermination(SHUTDOWN_TIME_MS, TimeUnit.MILLISECONDS);
}
}
// TODO(gene): Journal format has changed, maybe add Version to the format and add this test back
// or remove this test when we have better tests against journal checkpoint.
// @Test
// public void writeImage() throws IOException {
// // initialize the MasterInfo
// Journal journal =
// new Journal(mLocalAlluxioCluster.getAlluxioHome() + "journal/", "image.data", "log.data",
// mMasterAlluxioConf);
// Journal
// MasterInfo info =
// new MasterInfo(new InetSocketAddress(9999), journal, mExecutorService, mMasterAlluxioConf);
// // create the output streams
// ByteArrayOutputStream os = new ByteArrayOutputStream();
// DataOutputStream dos = new DataOutputStream(os);
// ObjectMapper mapper = JsonObject.createObjectMapper();
// ObjectWriter writer = mapper.writer();
// ImageElement version = null;
// ImageElement checkpoint = null;
// // write the image
// info.writeImage(writer, dos);
// // parse the written bytes and look for the Checkpoint and Version ImageElements
// String[] splits = new String(os.toByteArray()).split("\n");
// for (String split : splits) {
// byte[] bytes = split.getBytes();
// JsonParser parser = mapper.getFactory().createParser(bytes);
// ImageElement ele = parser.readValueAs(ImageElement.class);
// if (ele.mType.equals(ImageElementType.Checkpoint)) {
// checkpoint = ele;
// }
// if (ele.mType.equals(ImageElementType.Version)) {
// version = ele;
// }
// }
// // test the elements
// Assert.assertNotNull(checkpoint);
// Assert.assertEquals(checkpoint.mType, ImageElementType.Checkpoint);
// Assert.assertEquals(Constants.JOURNAL_VERSION, version.getInt("version").intValue());
// Assert.assertEquals(1, checkpoint.getInt("inodeCounter").intValue());
// Assert.assertEquals(0, checkpoint.getInt("editTransactionCounter").intValue());
// Assert.assertEquals(0, checkpoint.getInt("dependencyCounter").intValue());
// }
/**
* Creates a tree of files and directories under one root path using multiple concurrent threads.
*/
class ConcurrentCreator implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
private CreateFileOptions mCreateFileOptions;
/**
* Constructs the concurrent creator.
*
* @param depth the depth of the directory tree to create
* @param concurrencyDepth the depth down to which child paths are handed to new threads
* @param initPath the root path under which the tree is created
*/
ConcurrentCreator(int depth, int concurrencyDepth, AlluxioURI initPath) {
this(depth, concurrencyDepth, initPath, CreateFileOptions.defaults());
}
ConcurrentCreator(int depth, int concurrencyDepth, AlluxioURI initPath,
CreateFileOptions options) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
mCreateFileOptions = options;
}
/**
* Authenticates as TEST_USER and then creates the file tree rooted at the initial path.
*
* @return null
* @throws Exception if an exception occurs
*/
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
/**
* Recursively creates the file tree below {@code path}, spawning child threads while the
* concurrency depth allows.
*
* @param depth the remaining depth of the directory tree to create
* @param concurrencyDepth the remaining depth at which child paths are handed to new threads
* @param path the path at which to create the current file or directory
* @throws Exception if an exception occurs
*/
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
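// depth == 1 creates a file and deeper levels create a directory; each level then recurses into
// FILES_PER_NODE children, using a fresh thread pool while concurrencyDepth > 0.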
if (depth < 1) {
return;
} else if (depth == 1) {
long fileId = mFsMaster.createFile(path, mCreateFileOptions);
Assert.assertEquals(fileId, mFsMaster.getFileId(path));
// verify the default owner and permissions of the newly created file
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
} else {
mFsMaster.createDirectory(path, CreateDirectoryOptions.defaults());
Assert.assertNotNull(mFsMaster.getFileId(path));
long dirId = mFsMaster.getFileId(path);
Assert.assertNotEquals(-1, dirId);
FileInfo dirInfo = mFsMaster.getFileInfo(dirId);
Assert.assertEquals("", dirInfo.getOwner());
Assert.assertEquals(0755, (short) dirInfo.getMode());
}
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentCreator(depth - 1, concurrencyDepth - 1,
path.join(Integer.toString(i)), mCreateFileOptions));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
}
}
/**
* Frees all files under one root path using multiple concurrent threads.
*/
class ConcurrentFreer implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
ConcurrentFreer(int depth, int concurrencyDepth, AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
private void doFree(AlluxioURI path) throws Exception {
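// Free only evicts the blocks from Alluxio storage; the inode is kept, so the path must still
// resolve to a valid file id afterwards.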
mFsMaster.free(path, FreeOptions.defaults().setForced(true).setRecursive(true));
Assert.assertNotEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(path));
}
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1 || (path.hashCode() % 10 == 0)) {
// Sometimes we want to try freeing a path when we're not all the way down, which is what
// the second condition is for.
doFree(path);
} else {
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
// Recursively free the child paths in parallel.
Callable<Void> call = (new ConcurrentFreer(depth - 1, concurrencyDepth - 1,
path.join(Integer.toString(i))));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
doFree(path);
}
}
}
/**
* Deletes all files under one root path using multiple concurrent threads.
*/
class ConcurrentDeleter implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
/**
* Constructs the concurrent deleter.
*
* @param depth the depth of the directory tree to delete
* @param concurrencyDepth the depth down to which child paths are handed to new threads
* @param initPath the root path under which the tree is deleted
*/
ConcurrentDeleter(int depth, int concurrencyDepth, AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
private void doDelete(AlluxioURI path) throws Exception {
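// Delete removes the inode, so looking up the path afterwards must yield INVALID_FILE_ID.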
mFsMaster.delete(path, DeleteOptions.defaults().setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(path));
}
/**
* Recursively deletes the file tree below {@code path}, spawning child threads while the
* concurrency depth allows.
*
* @param depth the remaining depth of the directory tree to delete
* @param concurrencyDepth the remaining depth at which child paths are handed to new threads
* @param path the path being deleted at the current level
* @throws Exception if an exception occurs
*/
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1 || (path.hashCode() % 10 == 0)) {
// Sometimes we want to try deleting a path when we're not all the way down, which is what
// the second condition is for.
doDelete(path);
} else {
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentDeleter(depth - 1, concurrencyDepth - 1,
path.join(Integer.toString(i))));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
doDelete(path);
}
}
}
class ConcurrentRenamer implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mRootPath;
private AlluxioURI mRootPath2;
private AlluxioURI mInitPath;
ConcurrentRenamer(int depth, int concurrencyDepth, AlluxioURI rootPath, AlluxioURI rootPath2,
AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mRootPath = rootPath;
mRootPath2 = rootPath2;
mInitPath = initPath;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1 || (depth < mDepth && path.hashCode() % 10 < 3)) {
// Sometimes we want to try renaming a path when we're not all the way down, which is what
// the second condition is for. We have to create the destination's parent directories up to
// the path we're renaming. They might already exist, so createDirectory could throw a
// FileAlreadyExistsException, which we silently handle.
AlluxioURI srcPath = mRootPath.join(path);
AlluxioURI dstPath = mRootPath2.join(path);
long fileId = mFsMaster.getFileId(srcPath);
try {
CreateDirectoryOptions options = CreateDirectoryOptions.defaults().setRecursive(true);
mFsMaster.createDirectory(dstPath.getParent(), options);
} catch (FileAlreadyExistsException | InvalidPathException e) {
// FileAlreadyExistsException: This is an acceptable exception to get, since we don't know
// if the parent has been created yet by another thread.
// InvalidPathException: This could happen if we are renaming something that's a child of
// the root.
}
mFsMaster.rename(srcPath, dstPath, RenameOptions.defaults());
Assert.assertEquals(fileId, mFsMaster.getFileId(dstPath));
} else if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentRenamer(depth - 1, concurrencyDepth - 1, mRootPath,
mRootPath2, path.join(Integer.toString(i))));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
}
}
/**
* A class to start a thread that creates a file, completes the file and then deletes the file.
*/
private class ConcurrentCreateDelete implements Callable<Void> {
private final CyclicBarrier mStartBarrier;
private final AtomicBoolean mStopThread;
private final AlluxioURI[] mFiles;
public ConcurrentCreateDelete(CyclicBarrier barrier, AtomicBoolean stopThread,
AlluxioURI[] files) {
mStartBarrier = barrier;
mStopThread = stopThread;
mFiles = files;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
mStartBarrier.await();
Random random = new Random();
while (!mStopThread.get()) {
int id = random.nextInt(mFiles.length);
try {
// Create and complete a random file.
mFsMaster.createFile(mFiles[id], CreateFileOptions.defaults());
mFsMaster.completeFile(mFiles[id], CompleteFileOptions.defaults());
} catch (FileAlreadyExistsException | FileDoesNotExistException
| FileAlreadyCompletedException e) {
// Ignore
} catch (Exception e) {
throw e;
}
id = random.nextInt(mFiles.length);
try {
// Delete a random file.
mFsMaster.delete(mFiles[id], DeleteOptions.defaults().setRecursive(false));
} catch (FileDoesNotExistException e) {
// Ignore
} catch (Exception e) {
throw e;
}
}
return null;
}
}
}
|
tests/src/test/java/alluxio/master/file/FileSystemMasterIntegrationTest.java
|
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.master.file;
import alluxio.AlluxioURI;
import alluxio.Constants;
import alluxio.LocalAlluxioClusterResource;
import alluxio.PropertyKey;
import alluxio.exception.DirectoryNotEmptyException;
import alluxio.exception.ExceptionMessage;
import alluxio.exception.FileAlreadyCompletedException;
import alluxio.exception.FileAlreadyExistsException;
import alluxio.exception.FileDoesNotExistException;
import alluxio.exception.InvalidPathException;
import alluxio.heartbeat.HeartbeatContext;
import alluxio.heartbeat.HeartbeatScheduler;
import alluxio.heartbeat.ManuallyScheduleHeartbeat;
import alluxio.master.MasterTestUtils;
import alluxio.master.block.BlockMaster;
import alluxio.master.file.meta.InodeTree;
import alluxio.master.file.meta.LockedInodePath;
import alluxio.master.file.meta.TtlIntervalRule;
import alluxio.master.file.options.CompleteFileOptions;
import alluxio.master.file.options.CreateDirectoryOptions;
import alluxio.master.file.options.CreateFileOptions;
import alluxio.master.file.options.DeleteOptions;
import alluxio.master.file.options.FreeOptions;
import alluxio.master.file.options.ListStatusOptions;
import alluxio.master.file.options.RenameOptions;
import alluxio.security.authentication.AuthenticatedClientUser;
import alluxio.util.CommonUtils;
import alluxio.util.IdUtils;
import alluxio.wire.FileInfo;
import alluxio.wire.TtlAction;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.rules.Timeout;
import org.mockito.internal.util.reflection.Whitebox;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Test behavior of {@link FileSystemMaster}.
*
* For example, (concurrently) creating/deleting/renaming files.
*/
public class FileSystemMasterIntegrationTest {
private static final int DEPTH = 6;
private static final int FILES_PER_NODE = 4;
private static final int CONCURRENCY_DEPTH = 3;
private static final AlluxioURI ROOT_PATH = new AlluxioURI("/root");
private static final AlluxioURI ROOT_PATH2 = new AlluxioURI("/root2");
// Modify current time so that implementations can't accidentally pass unit tests by ignoring
// this specified time and always using System.currentTimeMillis()
private static final long TEST_TIME_MS = Long.MAX_VALUE;
private static final long TTL_CHECKER_INTERVAL_MS = 1000;
private static final String TEST_USER = "test";
// Time to wait for shutting down thread pool.
private static final long SHUTDOWN_TIME_MS = 15 * Constants.SECOND_MS;
@ClassRule
public static ManuallyScheduleHeartbeat sManuallySchedule =
new ManuallyScheduleHeartbeat(HeartbeatContext.MASTER_TTL_CHECK);
@ClassRule
public static TtlIntervalRule sTtlIntervalRule = new TtlIntervalRule(TTL_CHECKER_INTERVAL_MS);
@Rule
public Timeout mGlobalTimeout = Timeout.seconds(60);
@Rule
public LocalAlluxioClusterResource mLocalAlluxioClusterResource =
new LocalAlluxioClusterResource.Builder()
.setProperty(PropertyKey.MASTER_TTL_CHECKER_INTERVAL_MS,
String.valueOf(TTL_CHECKER_INTERVAL_MS))
.setProperty(PropertyKey.WORKER_MEMORY_SIZE, 1000)
.setProperty(PropertyKey.SECURITY_LOGIN_USERNAME, TEST_USER).build();
@Rule
public ExpectedException mThrown = ExpectedException.none();
private FileSystemMaster mFsMaster;
private InodeTree mInodeTree;
@Before
public final void before() throws Exception {
mFsMaster =
mLocalAlluxioClusterResource.get().getMaster().getInternalMaster().getFileSystemMaster();
AuthenticatedClientUser.set(TEST_USER);
mInodeTree = (InodeTree) Whitebox.getInternalState(mFsMaster, "mInodeTree");
}
@After
public final void after() throws Exception {
AuthenticatedClientUser.remove();
}
@Test
public void clientFileInfoDirectory() throws Exception {
AlluxioURI path = new AlluxioURI("/testFolder");
mFsMaster.createDirectory(path, CreateDirectoryOptions.defaults());
long fileId = mFsMaster.getFileId(path);
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("testFolder", fileInfo.getName());
Assert.assertEquals(1, fileInfo.getFileId());
Assert.assertEquals(0, fileInfo.getLength());
Assert.assertFalse(fileInfo.isCacheable());
Assert.assertTrue(fileInfo.isCompleted());
Assert.assertTrue(fileInfo.isFolder());
Assert.assertFalse(fileInfo.isPersisted());
Assert.assertFalse(fileInfo.isPinned());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0755, (short) fileInfo.getMode());
}
@Test
public void clientFileInfoEmptyFile() throws Exception {
long fileId = mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("testFile", fileInfo.getName());
Assert.assertEquals(fileId, fileInfo.getFileId());
Assert.assertEquals(0, fileInfo.getLength());
Assert.assertTrue(fileInfo.isCacheable());
Assert.assertFalse(fileInfo.isCompleted());
Assert.assertFalse(fileInfo.isFolder());
Assert.assertFalse(fileInfo.isPersisted());
Assert.assertFalse(fileInfo.isPinned());
Assert.assertEquals(Constants.NO_TTL, fileInfo.getTtl());
Assert.assertEquals(TtlAction.DELETE, fileInfo.getTtlAction());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
}
private FileSystemMaster createFileSystemMasterFromJournal() throws IOException {
return MasterTestUtils.createLeaderFileSystemMasterFromJournal();
}
// TODO(calvin): This test currently relies on the fact that the HDFS client is a cached instance
// to avoid an invalid lease exception. This should be fixed.
@Ignore
@Test
public void concurrentCreateJournal() throws Exception {
// Makes sure the file ids are the same between a master and the journal it creates
for (int i = 0; i < 5; i++) {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
FileSystemMaster fsMaster = createFileSystemMasterFromJournal();
for (FileInfo info : mFsMaster.listStatus(new AlluxioURI("/"),
ListStatusOptions.defaults())) {
AlluxioURI path = new AlluxioURI(info.getPath());
Assert.assertEquals(mFsMaster.getFileId(path), fsMaster.getFileId(path));
}
before();
}
}
@Test
public void concurrentCreate() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
}
/**
* Tests concurrent delete of files.
*
* @throws Exception if an error occurs during creating or deleting files
*/
@Test
public void concurrentDelete() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
ConcurrentDeleter concurrentDeleter =
new ConcurrentDeleter(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentDeleter.call();
Assert.assertEquals(0,
mFsMaster.listStatus(new AlluxioURI("/"), ListStatusOptions.defaults()).size());
}
/**
* Tests concurrent free of files.
*
* @throws Exception if an error occurs during creating or freeing files
*/
@Test
public void concurrentFree() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH,
CreateFileOptions.defaults().setPersisted(true));
concurrentCreator.call();
ConcurrentFreer concurrentFreer = new ConcurrentFreer(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentFreer.call();
}
/**
* Tests concurrent rename of files.
*
* @throws Exception if an error occurs during creating or renaming files
*/
@Test
public void concurrentRename() throws Exception {
ConcurrentCreator concurrentCreator =
new ConcurrentCreator(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH);
concurrentCreator.call();
int numFiles = mFsMaster.listStatus(ROOT_PATH, ListStatusOptions.defaults()).size();
ConcurrentRenamer concurrentRenamer = new ConcurrentRenamer(DEPTH, CONCURRENCY_DEPTH, ROOT_PATH,
ROOT_PATH2, AlluxioURI.EMPTY_URI);
concurrentRenamer.call();
Assert.assertEquals(numFiles,
mFsMaster.listStatus(ROOT_PATH2, ListStatusOptions.defaults()).size());
}
@Test
public void createAlreadyExistFile() throws Exception {
mThrown.expect(FileAlreadyExistsException.class);
mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
mFsMaster.createDirectory(new AlluxioURI("/testFile"), CreateDirectoryOptions.defaults());
}
@Test
public void createDirectory() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
FileInfo fileInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertTrue(fileInfo.isFolder());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0755, (short) fileInfo.getMode());
}
@Test
public void createFileInvalidPath() throws Exception {
mThrown.expect(InvalidPathException.class);
mFsMaster.createFile(new AlluxioURI("testFile"), CreateFileOptions.defaults());
}
@Test
public void createFileInvalidPathTest2() throws Exception {
mThrown.expect(FileAlreadyExistsException.class);
mFsMaster.createFile(new AlluxioURI("/"), CreateFileOptions.defaults());
}
@Test
public void createFileInvalidPathTest3() throws Exception {
mThrown.expect(InvalidPathException.class);
mFsMaster.createFile(new AlluxioURI("/testFile1"), CreateFileOptions.defaults());
mFsMaster.createFile(new AlluxioURI("/testFile1/testFile2"), CreateFileOptions.defaults());
}
@Test
public void createFilePerf() throws Exception {
for (int k = 0; k < 200; k++) {
CreateDirectoryOptions options = CreateDirectoryOptions.defaults().setRecursive(true);
mFsMaster.createDirectory(
new AlluxioURI("/testFile").join(Constants.MASTER_COLUMN_FILE_PREFIX + k).join("0"),
options);
}
for (int k = 0; k < 200; k++) {
mFsMaster.getFileInfo(mFsMaster.getFileId(
new AlluxioURI("/testFile").join(Constants.MASTER_COLUMN_FILE_PREFIX + k).join("0")));
}
}
@Test
public void createFile() throws Exception {
mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
FileInfo fileInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFile")));
Assert.assertFalse(fileInfo.isFolder());
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
}
@Test
public void deleteDirectoryWithDirectories() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createDirectory(new AlluxioURI("/testFolder/testFolder2"),
CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
long fileId2 = mFsMaster.createFile(new AlluxioURI("/testFolder/testFolder2/testFile2"),
CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(2, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(fileId2,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
}
@Test
public void deleteDirectoryWithDirectoriesTest2() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createDirectory(new AlluxioURI("/testFolder/testFolder2"),
CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
long fileId2 = mFsMaster.createFile(new AlluxioURI("/testFolder/testFolder2/testFile2"),
CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(2, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(fileId2,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
try {
mFsMaster.delete(new AlluxioURI("/testFolder/testFolder2"), DeleteOptions.defaults()
.setRecursive(false));
Assert.fail("Deleting a nonempty directory nonrecursively should fail");
} catch (DirectoryNotEmptyException e) {
Assert.assertEquals(
ExceptionMessage.DELETE_NONEMPTY_DIRECTORY_NONRECURSIVE.getMessage("testFolder2"),
e.getMessage());
}
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(2, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(fileId2,
mFsMaster.getFileId(new AlluxioURI("/testFolder/testFolder2/testFile2")));
}
@Test
public void deleteDirectoryWithFiles() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID,
mFsMaster.getFileId(new AlluxioURI("/testFolder")));
}
@Test
public void deleteDirectoryWithFilesTest2() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long fileId =
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
try {
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(false));
Assert.fail("Deleting a nonempty directory nonrecursively should fail");
} catch (DirectoryNotEmptyException e) {
Assert.assertEquals(
ExceptionMessage.DELETE_NONEMPTY_DIRECTORY_NONRECURSIVE.getMessage("testFolder"),
e.getMessage());
}
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
}
@Test
public void deleteEmptyDirectory() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
Assert.assertEquals(1, mFsMaster.getFileId(new AlluxioURI("/testFolder")));
mFsMaster.delete(new AlluxioURI("/testFolder"), DeleteOptions.defaults()
.setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID,
mFsMaster.getFileId(new AlluxioURI("/testFolder")));
}
@Test
public void deleteFile() throws Exception {
long fileId = mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
Assert.assertEquals(fileId, mFsMaster.getFileId(new AlluxioURI("/testFile")));
mFsMaster.delete(new AlluxioURI("/testFile"), DeleteOptions.defaults().setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(new AlluxioURI("/testFile")));
}
@Test
public void deleteRoot() throws Exception {
mThrown.expect(InvalidPathException.class);
mThrown.expectMessage(ExceptionMessage.DELETE_ROOT_DIRECTORY.getMessage());
mFsMaster.delete(new AlluxioURI("/"), DeleteOptions.defaults().setRecursive(true));
}
@Test
public void getCapacityBytes() {
BlockMaster blockMaster =
mLocalAlluxioClusterResource.get().getMaster().getInternalMaster().getBlockMaster();
Assert.assertEquals(1000, blockMaster.getCapacityBytes());
}
@Test
public void lastModificationTimeCompleteFile() throws Exception {
long fileId = mFsMaster.createFile(new AlluxioURI("/testFile"), CreateFileOptions.defaults());
long opTimeMs = TEST_TIME_MS;
mFsMaster.completeFile(new AlluxioURI("/testFile"),
CompleteFileOptions.defaults().setOperationTimeMs(opTimeMs).setUfsLength(0));
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals(opTimeMs, fileInfo.getLastModificationTimeMs());
}
@Test
public void lastModificationTimeCreateFile() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long opTimeMs = TEST_TIME_MS;
CreateFileOptions options = CreateFileOptions.defaults().setOperationTimeMs(opTimeMs);
try (LockedInodePath inodePath = mInodeTree
.lockInodePath(new AlluxioURI("/testFolder/testFile"), InodeTree.LockMode.WRITE)) {
mFsMaster.createFileInternal(inodePath, options);
}
FileInfo folderInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(opTimeMs, folderInfo.getLastModificationTimeMs());
}
/**
* Tests that deleting a file from a folder updates the folder's last modification time.
*/
@Test
public void lastModificationTimeDelete() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createFile(new AlluxioURI("/testFolder/testFile"), CreateFileOptions.defaults());
long folderId = mFsMaster.getFileId(new AlluxioURI("/testFolder"));
long modificationTimeBeforeDelete = mFsMaster.getFileInfo(folderId).getLastModificationTimeMs();
CommonUtils.sleepMs(2);
mFsMaster.delete(new AlluxioURI("/testFolder/testFile"), DeleteOptions.defaults()
.setRecursive(true));
long modificationTimeAfterDelete = mFsMaster.getFileInfo(folderId).getLastModificationTimeMs();
Assert.assertTrue(modificationTimeBeforeDelete < modificationTimeAfterDelete);
}
@Test
public void lastModificationTimeRename() throws Exception {
AlluxioURI srcPath = new AlluxioURI("/testFolder/testFile1");
AlluxioURI dstPath = new AlluxioURI("/testFolder/testFile2");
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
mFsMaster.createFile(srcPath, CreateFileOptions.defaults());
RenameOptions options = RenameOptions.defaults().setOperationTimeMs(TEST_TIME_MS);
mFsMaster.rename(srcPath, dstPath, options);
FileInfo folderInfo = mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder")));
Assert.assertEquals(TEST_TIME_MS, folderInfo.getLastModificationTimeMs());
}
@Test
public void listFiles() throws Exception {
CreateFileOptions options = CreateFileOptions.defaults().setBlockSizeBytes(64);
HashSet<Long> ids = new HashSet<>();
HashSet<Long> dirIds = new HashSet<>();
for (int i = 0; i < 10; i++) {
AlluxioURI dir = new AlluxioURI("/i" + i);
mFsMaster.createDirectory(dir, CreateDirectoryOptions.defaults());
dirIds.add(mFsMaster.getFileId(dir));
for (int j = 0; j < 10; j++) {
ids.add(mFsMaster.createFile(dir.join("j" + j), options));
}
}
HashSet<Long> listedIds = new HashSet<>();
HashSet<Long> listedDirIds = new HashSet<>();
List<FileInfo> infoList =
mFsMaster.listStatus(new AlluxioURI("/"), ListStatusOptions.defaults());
for (FileInfo info : infoList) {
long id = info.getFileId();
listedDirIds.add(id);
for (FileInfo fileInfo : mFsMaster.listStatus(new AlluxioURI(info.getPath()),
ListStatusOptions.defaults())) {
listedIds.add(fileInfo.getFileId());
}
}
Assert.assertEquals(ids, listedIds);
Assert.assertEquals(dirIds, listedDirIds);
}
@Test
public void ls() throws Exception {
CreateFileOptions options = CreateFileOptions.defaults().setBlockSizeBytes(64);
for (int i = 0; i < 10; i++) {
mFsMaster.createDirectory(new AlluxioURI("/i" + i), CreateDirectoryOptions.defaults());
for (int j = 0; j < 10; j++) {
mFsMaster.createFile(new AlluxioURI("/i" + i + "/j" + j), options);
}
}
Assert.assertEquals(1,
mFsMaster.listStatus(new AlluxioURI("/i0/j0"), ListStatusOptions.defaults()).size());
for (int i = 0; i < 10; i++) {
Assert.assertEquals(10,
mFsMaster.listStatus(new AlluxioURI("/i" + i), ListStatusOptions.defaults()).size());
}
Assert.assertEquals(10,
mFsMaster.listStatus(new AlluxioURI("/"), ListStatusOptions.defaults()).size());
}
@Test
public void notFileCompletion() throws Exception {
mThrown.expect(FileDoesNotExistException.class);
mFsMaster.createDirectory(new AlluxioURI("/testFile"), CreateDirectoryOptions.defaults());
CompleteFileOptions options = CompleteFileOptions.defaults();
mFsMaster.completeFile(new AlluxioURI("/testFile"), options);
}
@Test
public void renameExistingDst() throws Exception {
mFsMaster.createFile(new AlluxioURI("/testFile1"), CreateFileOptions.defaults());
mFsMaster.createFile(new AlluxioURI("/testFile2"), CreateFileOptions.defaults());
try {
mFsMaster.rename(new AlluxioURI("/testFile1"), new AlluxioURI("/testFile2"),
RenameOptions.defaults());
Assert.fail("Should not be able to rename to an existing file");
} catch (Exception e) {
// expected
}
}
@Test
public void renameNonexistent() throws Exception {
mFsMaster.createFile(new AlluxioURI("/testFile1"), CreateFileOptions.defaults());
Assert.assertEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(new AlluxioURI("/testFile2")));
}
@Test
public void renameToDeeper() throws Exception {
CreateFileOptions createFileOptions = CreateFileOptions.defaults().setRecursive(true);
CreateDirectoryOptions createDirectoryOptions =
CreateDirectoryOptions.defaults().setRecursive(true);
mThrown.expect(InvalidPathException.class);
mFsMaster.createDirectory(new AlluxioURI("/testDir1/testDir2"), createDirectoryOptions);
mFsMaster.createFile(new AlluxioURI("/testDir1/testDir2/testDir3/testFile3"),
createFileOptions);
mFsMaster.rename(new AlluxioURI("/testDir1/testDir2"),
new AlluxioURI("/testDir1/testDir2/testDir3/testDir4"), RenameOptions.defaults());
}
@Test
public void ttlCreateFile() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 100;
CreateFileOptions options = CreateFileOptions.defaults().setTtl(ttl);
options.setTtlAction(TtlAction.FREE);
try (LockedInodePath inodePath = mInodeTree
.lockInodePath(new AlluxioURI("/testFolder/testFile"), InodeTree.LockMode.WRITE)) {
mFsMaster.createFileInternal(inodePath, options);
}
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile")));
Assert.assertEquals(ttl, folderInfo.getTtl());
Assert.assertEquals(TtlAction.FREE, folderInfo.getTtlAction());
}
@Test
public void ttlExpiredCreateFile() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 1;
CreateFileOptions options = CreateFileOptions.defaults().setTtl(ttl);
long fileId = mFsMaster.createFile(new AlluxioURI("/testFolder/testFile1"), options);
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile1")));
Assert.assertEquals(fileId, folderInfo.getFileId());
Assert.assertEquals(ttl, folderInfo.getTtl());
// Sleep for the ttl expiration.
CommonUtils.sleepMs(2 * TTL_CHECKER_INTERVAL_MS);
HeartbeatScheduler.execute(HeartbeatContext.MASTER_TTL_CHECK);
mThrown.expect(FileDoesNotExistException.class);
mFsMaster.getFileInfo(fileId);
}
@Test
public void ttlExpiredCreateFileWithFreeActionTest() throws Exception {
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 1;
CreateFileOptions options =
CreateFileOptions.defaults().setPersisted(true).setTtl(ttl).setTtlAction(TtlAction.FREE);
long fileId = mFsMaster.createFile(new AlluxioURI("/testFolder/testFile1"), options);
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile1")));
Assert.assertEquals(fileId, folderInfo.getFileId());
Assert.assertEquals(ttl, folderInfo.getTtl());
Assert.assertEquals(TtlAction.FREE, folderInfo.getTtlAction());
// Sleep for the ttl expiration.
CommonUtils.sleepMs(2 * TTL_CHECKER_INTERVAL_MS);
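// Make sure the TTL check thread is idle, trigger exactly one TTL check, then wait for that
// check to complete before inspecting the file.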
HeartbeatScheduler.await(HeartbeatContext.MASTER_TTL_CHECK, 10, TimeUnit.SECONDS);
HeartbeatScheduler.schedule(HeartbeatContext.MASTER_TTL_CHECK);
HeartbeatScheduler.await(HeartbeatContext.MASTER_TTL_CHECK, 10, TimeUnit.SECONDS);
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals(Constants.NO_TTL, fileInfo.getTtl());
Assert.assertEquals(TtlAction.DELETE, fileInfo.getTtlAction());
}
@Test
public void ttlRename() throws Exception {
AlluxioURI srcPath = new AlluxioURI("/testFolder/testFile1");
AlluxioURI dstPath = new AlluxioURI("/testFolder/testFile2");
mFsMaster.createDirectory(new AlluxioURI("/testFolder"), CreateDirectoryOptions.defaults());
long ttl = 1;
CreateFileOptions createOptions = CreateFileOptions.defaults().setTtl(ttl);
mFsMaster.createFile(srcPath, createOptions);
RenameOptions renameOptions = RenameOptions.defaults().setOperationTimeMs(TEST_TIME_MS);
mFsMaster.rename(srcPath, dstPath, renameOptions);
FileInfo folderInfo =
mFsMaster.getFileInfo(mFsMaster.getFileId(new AlluxioURI("/testFolder/testFile2")));
Assert.assertEquals(ttl, folderInfo.getTtl());
}
@Test
public void concurrentCreateDelete() throws Exception {
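// Hammer a fixed set of paths with concurrent create/complete/delete calls, then replay the
// journal into a fresh master to verify that the racing operations were journaled consistently.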
List<Future<?>> futures = new ArrayList<>();
AlluxioURI directory = new AlluxioURI("/dir");
AlluxioURI[] files = new AlluxioURI[10];
final int numThreads = 8;
final int testDurationMs = 3000;
for (int i = 0; i < 10; i++) {
files[i] = directory.join("file_" + i);
}
mFsMaster.createDirectory(directory, CreateDirectoryOptions.defaults());
AtomicBoolean stopThreads = new AtomicBoolean(false);
CyclicBarrier barrier = new CyclicBarrier(numThreads);
ExecutorService threadPool = Executors.newCachedThreadPool();
try {
for (int i = 0; i < numThreads; i++) {
futures.add(threadPool.submit(new ConcurrentCreateDelete(barrier, stopThreads, files)));
}
CommonUtils.sleepMs(testDurationMs);
stopThreads.set(true);
for (Future<?> future : futures) {
future.get();
}
// Stop Alluxio.
mLocalAlluxioClusterResource.get().stopFS();
// Create the master using the existing journal.
createFileSystemMasterFromJournal();
} finally {
threadPool.shutdownNow();
threadPool.awaitTermination(SHUTDOWN_TIME_MS, TimeUnit.MILLISECONDS);
}
}
// TODO(gene): Journal format has changed, maybe add Version to the format and add this test back
// or remove this test when we have better tests against journal checkpoint.
// @Test
// public void writeImage() throws IOException {
// // initialize the MasterInfo
// Journal journal =
// new Journal(mLocalAlluxioCluster.getAlluxioHome() + "journal/", "image.data", "log.data",
// mMasterAlluxioConf);
// Journal
// MasterInfo info =
// new MasterInfo(new InetSocketAddress(9999), journal, mExecutorService, mMasterAlluxioConf);
// // create the output streams
// ByteArrayOutputStream os = new ByteArrayOutputStream();
// DataOutputStream dos = new DataOutputStream(os);
// ObjectMapper mapper = JsonObject.createObjectMapper();
// ObjectWriter writer = mapper.writer();
// ImageElement version = null;
// ImageElement checkpoint = null;
// // write the image
// info.writeImage(writer, dos);
// // parse the written bytes and look for the Checkpoint and Version ImageElements
// String[] splits = new String(os.toByteArray()).split("\n");
// for (String split : splits) {
// byte[] bytes = split.getBytes();
// JsonParser parser = mapper.getFactory().createParser(bytes);
// ImageElement ele = parser.readValueAs(ImageElement.class);
// if (ele.mType.equals(ImageElementType.Checkpoint)) {
// checkpoint = ele;
// }
// if (ele.mType.equals(ImageElementType.Version)) {
// version = ele;
// }
// }
// // test the elements
// Assert.assertNotNull(checkpoint);
// Assert.assertEquals(checkpoint.mType, ImageElementType.Checkpoint);
// Assert.assertEquals(Constants.JOURNAL_VERSION, version.getInt("version").intValue());
// Assert.assertEquals(1, checkpoint.getInt("inodeCounter").intValue());
// Assert.assertEquals(0, checkpoint.getInt("editTransactionCounter").intValue());
// Assert.assertEquals(0, checkpoint.getInt("dependencyCounter").intValue());
// }
/**
* Creates a tree of files and directories under one root path using multiple concurrent threads.
*/
class ConcurrentCreator implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
private CreateFileOptions mCreateFileOptions;
/**
* Constructs the concurrent creator.
*
* @param depth the depth of the directory tree to create
* @param concurrencyDepth the depth down to which child paths are handed to new threads
* @param initPath the root path under which the tree is created
*/
ConcurrentCreator(int depth, int concurrencyDepth, AlluxioURI initPath) {
this(depth, concurrencyDepth, initPath, CreateFileOptions.defaults());
}
ConcurrentCreator(int depth, int concurrencyDepth, AlluxioURI initPath,
CreateFileOptions options) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
mCreateFileOptions = options;
}
/**
* Authenticates as TEST_USER and then creates the file tree rooted at the initial path.
*
* @return null
* @throws Exception if an exception occurs
*/
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
/**
* Recursively creates the file tree below {@code path}, spawning child threads while the
* concurrency depth allows.
*
* @param depth the remaining depth of the directory tree to create
* @param concurrencyDepth the remaining depth at which child paths are handed to new threads
* @param path the path at which to create the current file or directory
* @throws Exception if an exception occurs
*/
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
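// depth == 1 creates a file and deeper levels create a directory; each level then recurses into
// FILES_PER_NODE children, using a fresh thread pool while concurrencyDepth > 0.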
if (depth < 1) {
return;
} else if (depth == 1) {
long fileId = mFsMaster.createFile(path, mCreateFileOptions);
Assert.assertEquals(fileId, mFsMaster.getFileId(path));
// verify the default owner and permissions of the newly created file
FileInfo fileInfo = mFsMaster.getFileInfo(fileId);
Assert.assertEquals("", fileInfo.getOwner());
Assert.assertEquals(0644, (short) fileInfo.getMode());
} else {
mFsMaster.createDirectory(path, CreateDirectoryOptions.defaults());
Assert.assertNotNull(mFsMaster.getFileId(path));
long dirId = mFsMaster.getFileId(path);
Assert.assertNotEquals(-1, dirId);
FileInfo dirInfo = mFsMaster.getFileInfo(dirId);
Assert.assertEquals("", dirInfo.getOwner());
Assert.assertEquals(0755, (short) dirInfo.getMode());
}
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentCreator(depth - 1, concurrencyDepth - 1,
path.join(Integer.toString(i)), mCreateFileOptions));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
}
}
/**
* Frees all files under one root path using multiple concurrent threads.
*/
class ConcurrentFreer implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
ConcurrentFreer(int depth, int concurrencyDepth, AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
private void doFree(AlluxioURI path) throws Exception {
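// Free only evicts the blocks from Alluxio storage; the inode is kept, so the path must still
// resolve to a valid file id afterwards.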
mFsMaster.free(path, FreeOptions.defaults().setForced(true).setRecursive(true));
Assert.assertNotEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(path));
}
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1 || (path.hashCode() % 10 == 0)) {
// Sometimes we want to try freeing a path when we're not all the way down, which is what
// the second condition is for.
doFree(path);
} else {
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
            Callable<Void> call = (new ConcurrentFreer(depth - 1, concurrencyDepth - 1,
                path.join(Integer.toString(i))));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
doFree(path);
}
}
}
/*
* This class provides multiple concurrent threads to delete all files in one directory.
*/
class ConcurrentDeleter implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mInitPath;
ConcurrentDeleter(int depth, int concurrencyDepth, AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mInitPath = initPath;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
private void doDelete(AlluxioURI path) throws Exception {
mFsMaster.delete(path, DeleteOptions.defaults().setRecursive(true));
Assert.assertEquals(IdUtils.INVALID_FILE_ID, mFsMaster.getFileId(path));
}
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1 || (path.hashCode() % 10 == 0)) {
// Sometimes we want to try deleting a path when we're not all the way down, which is what
// the second condition is for.
doDelete(path);
} else {
if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentDeleter(depth - 1, concurrencyDepth - 1,
path.join(Integer.toString(i))));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
doDelete(path);
}
}
}
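  /**
   * This class provides multiple concurrent threads to rename all files in one directory.
   */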
class ConcurrentRenamer implements Callable<Void> {
private int mDepth;
private int mConcurrencyDepth;
private AlluxioURI mRootPath;
private AlluxioURI mRootPath2;
private AlluxioURI mInitPath;
ConcurrentRenamer(int depth, int concurrencyDepth, AlluxioURI rootPath, AlluxioURI rootPath2,
AlluxioURI initPath) {
mDepth = depth;
mConcurrencyDepth = concurrencyDepth;
mRootPath = rootPath;
mRootPath2 = rootPath2;
mInitPath = initPath;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
exec(mDepth, mConcurrencyDepth, mInitPath);
return null;
}
public void exec(int depth, int concurrencyDepth, AlluxioURI path) throws Exception {
if (depth < 1) {
return;
} else if (depth == 1 || (depth < mDepth && path.hashCode() % 10 < 3)) {
// Sometimes we want to try renaming a path when we're not all the way down, which is what
      // the second condition is for. We have to create the destination path up to the parent of
      // what we're renaming. That parent might already exist, so createDirectory could throw a
      // FileAlreadyExistsException, which we silently handle.
AlluxioURI srcPath = mRootPath.join(path);
AlluxioURI dstPath = mRootPath2.join(path);
long fileId = mFsMaster.getFileId(srcPath);
try {
CreateDirectoryOptions options = CreateDirectoryOptions.defaults().setRecursive(true);
mFsMaster.createDirectory(dstPath.getParent(), options);
} catch (FileAlreadyExistsException | InvalidPathException e) {
// FileAlreadyExistsException: This is an acceptable exception to get, since we don't know
// if the parent has been created yet by another thread.
// InvalidPathException: This could happen if we are renaming something that's a child of
// the root.
}
mFsMaster.rename(srcPath, dstPath, RenameOptions.defaults());
Assert.assertEquals(fileId, mFsMaster.getFileId(dstPath));
} else if (concurrencyDepth > 0) {
ExecutorService executor = Executors.newCachedThreadPool();
try {
ArrayList<Future<Void>> futures = new ArrayList<>(FILES_PER_NODE);
for (int i = 0; i < FILES_PER_NODE; i++) {
Callable<Void> call = (new ConcurrentRenamer(depth - 1, concurrencyDepth - 1, mRootPath,
mRootPath2, path.join(Integer.toString(i))));
futures.add(executor.submit(call));
}
for (Future<Void> f : futures) {
f.get();
}
} finally {
executor.shutdown();
}
} else {
for (int i = 0; i < FILES_PER_NODE; i++) {
exec(depth - 1, concurrencyDepth, path.join(Integer.toString(i)));
}
}
}
}
/**
* A class to start a thread that creates a file, completes the file and then deletes the file.
*/
private class ConcurrentCreateDelete implements Callable<Void> {
private final CyclicBarrier mStartBarrier;
private final AtomicBoolean mStopThread;
private final AlluxioURI[] mFiles;
public ConcurrentCreateDelete(CyclicBarrier barrier, AtomicBoolean stopThread,
AlluxioURI[] files) {
mStartBarrier = barrier;
mStopThread = stopThread;
mFiles = files;
}
@Override
public Void call() throws Exception {
AuthenticatedClientUser.set(TEST_USER);
mStartBarrier.await();
Random random = new Random();
while (!mStopThread.get()) {
int id = random.nextInt(mFiles.length);
try {
// Create and complete a random file.
mFsMaster.createFile(mFiles[id], CreateFileOptions.defaults());
mFsMaster.completeFile(mFiles[id], CompleteFileOptions.defaults());
} catch (FileAlreadyExistsException | FileDoesNotExistException
| FileAlreadyCompletedException e) {
// Ignore
} catch (Exception e) {
throw e;
}
id = random.nextInt(mFiles.length);
try {
// Delete a random file.
mFsMaster.delete(mFiles[id], DeleteOptions.defaults().setRecursive(false));
} catch (FileDoesNotExistException e) {
// Ignore
} catch (Exception e) {
throw e;
}
}
return null;
}
}
}
|
[SMALLFIX] Add javadoc for FileSystemMasterIntegrationTest#ConcurrentDeleter
|
tests/src/test/java/alluxio/master/file/FileSystemMasterIntegrationTest.java
|
[SMALLFIX] Add javadoc for FileSystemMasterIntegrationTest#ConcurrentDeleter
|
|
Java
|
apache-2.0
|
af59c46363f3497d44548021e4ff15d924ddbec3
| 0
|
apache/solr,apache/solr,apache/solr,apache/solr,apache/solr
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.cloud;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.common.util.IOUtils;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@LuceneTestCase.Slow
@LuceneTestCase.AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12386") // "Can't find resource"
public class ConcurrentCreateRoutedAliasTest extends SolrTestCaseJ4 {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private MiniSolrCloudCluster solrCluster;
// to avoid having to delete stuff...
volatile int num = 0;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
solrCluster = new MiniSolrCloudCluster(4, createTempDir(), buildJettyConfig("/solr"));
}
@Override
@After
public void tearDown() throws Exception {
solrCluster.shutdown();
super.tearDown();
}
@Test
public void testConcurrentCreateRoutedAliasMinimal() throws IOException, KeeperException.NoNodeException {
    // This is the test where we blow out a bunch of create commands all at once.
    // Other tests are more functionality based, and just use a single thread.
    // Very occasional failure of this test due to overseer overload would not be worrisome (just bothersome).
    // Creating large numbers of time routed aliases concurrently would be an EXTREMELY odd,
    // if not fundamentally broken, use case. This test method is just here to guard against any race
    // conditions in the code that could crop up rarely in lower-volume usage.
    // That said, any failures involving NPEs, missing parameters, or oddities other than overwhelming
    // the overseer queue with retry races emanating from this test should be investigated. Also, if it
    // fails frequently, that of course needs to be investigated.
final AtomicReference<Exception> failure = new AtomicReference<>();
    // Note: this number of threads seems to work regularly with the up-tweaked number of retries (50) in
    // org.apache.solr.common.cloud.ZkStateReader.AliasesManager.applyModificationAndExportToZk().
    // With the original 5 retries this wouldn't reliably pass with 10 threads, but with 50 retries it seems
    // to handle 50 threads about a dozen times without any failure (on a 32 thread processor).
    // It also passed 3/3 at 150 threads and 2/3 with 250 threads on both 1 node and 4 nodes.
    // The failure mode seems to be overseer tasks that are not found. I suspect this happens when enough
    // threads get into retry races and the spam overwhelms the overseer. (That this can happen might imply
    // an issue over there, but I'm not sure, since there is an intentional hard limit on the overseer queue
    // and I haven't tried to count the retries and figure out whether the requests actually exceed that
    // limit; the speed of retries might indicate an effectively hot loop, but again, that is a separate issue.)
    // The hope is that the level of concurrency supported by create routed alias and the code it uses is such
    // that this test wouldn't spuriously fail more than once a year. If that's true, users should never see
    // an issue in the wild unless they are doing something we probably don't want to support anyway.
final CreateRoutedAliasThread[] threads = new CreateRoutedAliasThread[4];
int numStart = num;
for (; num < threads.length + numStart; num++) {
final String aliasName = "testAlias" + num;
final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString();
final SolrClient solrClient = getHttpSolrClient(baseUrl);
int i = num - numStart;
threads[i] = new CreateRoutedAliasThread("create-delete-search-" + i, aliasName, "NOW/HOUR",
solrClient, failure, false);
}
startAll(threads);
joinAll(threads);
assertNull("concurrent alias creation failed " + failure.get(), failure.get());
}
@Test
public void testConcurrentCreateRoutedAliasComplex() {
final AtomicReference<Exception> failure = new AtomicReference<>();
final CreateRoutedAliasThread[] threads = new CreateRoutedAliasThread[1];
int numStart = num;
    System.out.println("NUM ==> " + num);
for (; num < threads.length + numStart; num++) {
final String aliasName = "testAliasCplx" + num;
final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString();
final SolrClient solrClient = getHttpSolrClient(baseUrl);
int i = num - numStart;
threads[i] = new CreateRoutedAliasThread("create-routed-alias-cplx-" + i,
aliasName, "2017-12-25T23:24:25Z",
solrClient, failure, true);
}
startAll(threads);
joinAll(threads);
assertNull("concurrent alias creation failed " + failure.get(), failure.get());
}
private void joinAll(final CreateRoutedAliasThread[] threads) {
for (CreateRoutedAliasThread t : threads) {
try {
t.joinAndClose();
} catch (InterruptedException e) {
Thread.interrupted();
throw new RuntimeException(e);
}
}
}
private void startAll(final Thread[] threads) {
for (Thread t : threads) {
t.start();
}
}
private static class CreateRoutedAliasThread extends Thread {
final String aliasName;
protected final String start;
protected final SolrClient solrClient;
protected final AtomicReference<Exception> failure;
CreateRoutedAliasThread(
String name, String aliasName, String start, SolrClient solrClient,
AtomicReference<Exception> failure, boolean v2) {
super(name);
this.aliasName = aliasName;
this.start = start;
this.solrClient = solrClient;
this.failure = failure;
}
@Override
public void run() {
doWork();
}
void doWork() {
createAlias();
}
void addFailure(Exception e) {
log.error("Add Failure", e);
synchronized (failure) {
if (failure.get() != null) {
failure.get().addSuppressed(e);
} else {
failure.set(e);
}
}
}
private void createAlias() {
try {
CollectionAdminRequest.CreateTimeRoutedAlias rq = CollectionAdminRequest
.createTimeRoutedAlias(
aliasName,
start,
"+12HOUR",
"routedFoo_dt",
CollectionAdminRequest.createCollection("_ignored_", "_default", 1, 1)
);
final CollectionAdminResponse response = rq.process(solrClient);
if (response.getStatus() != 0) {
addFailure(new RuntimeException("failed to create collection " + aliasName));
}
} catch (Exception e) {
addFailure(e);
}
}
void joinAndClose() throws InterruptedException {
try {
super.join(60000);
} finally {
IOUtils.closeQuietly(solrClient);
}
}
}
}
|
solr/core/src/test/org/apache/solr/cloud/ConcurrentCreateRoutedAliasTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.cloud;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
import org.apache.solr.common.util.IOUtils;
import org.apache.zookeeper.KeeperException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@LuceneTestCase.Slow
public class ConcurrentCreateRoutedAliasTest extends SolrTestCaseJ4 {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private MiniSolrCloudCluster solrCluster;
// to avoid having to delete stuff...
volatile int num = 0;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
solrCluster = new MiniSolrCloudCluster(4, createTempDir(), buildJettyConfig("/solr"));
}
@Override
@After
public void tearDown() throws Exception {
solrCluster.shutdown();
super.tearDown();
}
@Test
public void testConcurrentCreateRoutedAliasMinimal() throws IOException, KeeperException.NoNodeException {
    // This is the test where we blow out a bunch of create commands all at once.
    // Other tests are more functionality based, and just use a single thread.
    // Very occasional failure of this test due to overseer overload would not be worrisome (just bothersome).
    // Creating large numbers of time routed aliases concurrently would be an EXTREMELY odd,
    // if not fundamentally broken, use case. This test method is just here to guard against any race
    // conditions in the code that could crop up rarely in lower-volume usage.
    // That said, any failures involving NPEs, missing parameters, or oddities other than overwhelming
    // the overseer queue with retry races emanating from this test should be investigated. Also, if it
    // fails frequently, that of course needs to be investigated.
final AtomicReference<Exception> failure = new AtomicReference<>();
    // Note: this number of threads seems to work regularly with the up-tweaked number of retries (50) in
    // org.apache.solr.common.cloud.ZkStateReader.AliasesManager.applyModificationAndExportToZk().
    // With the original 5 retries this wouldn't reliably pass with 10 threads, but with 50 retries it seems
    // to handle 50 threads about a dozen times without any failure (on a 32 thread processor).
    // It also passed 3/3 at 150 threads and 2/3 with 250 threads on both 1 node and 4 nodes.
    // The failure mode seems to be overseer tasks that are not found. I suspect this happens when enough
    // threads get into retry races and the spam overwhelms the overseer. (That this can happen might imply
    // an issue over there, but I'm not sure, since there is an intentional hard limit on the overseer queue
    // and I haven't tried to count the retries and figure out whether the requests actually exceed that
    // limit; the speed of retries might indicate an effectively hot loop, but again, that is a separate issue.)
    // The hope is that the level of concurrency supported by create routed alias and the code it uses is such
    // that this test wouldn't spuriously fail more than once a year. If that's true, users should never see
    // an issue in the wild unless they are doing something we probably don't want to support anyway.
final CreateRoutedAliasThread[] threads = new CreateRoutedAliasThread[4];
int numStart = num;
for (; num < threads.length + numStart; num++) {
final String aliasName = "testAlias" + num;
final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString();
final SolrClient solrClient = getHttpSolrClient(baseUrl);
int i = num - numStart;
threads[i] = new CreateRoutedAliasThread("create-delete-search-" + i, aliasName, "NOW/HOUR",
solrClient, failure, false);
}
startAll(threads);
joinAll(threads);
assertNull("concurrent alias creation failed " + failure.get(), failure.get());
}
@Test
public void testConcurrentCreateRoutedAliasComplex() {
final AtomicReference<Exception> failure = new AtomicReference<>();
final CreateRoutedAliasThread[] threads = new CreateRoutedAliasThread[1];
int numStart = num;
    System.out.println("NUM ==> " + num);
for (; num < threads.length + numStart; num++) {
final String aliasName = "testAliasCplx" + num;
final String baseUrl = solrCluster.getJettySolrRunners().get(0).getBaseUrl().toString();
final SolrClient solrClient = getHttpSolrClient(baseUrl);
int i = num - numStart;
threads[i] = new CreateRoutedAliasThread("create-routed-alias-cplx-" + i,
aliasName, "2017-12-25T23:24:25Z",
solrClient, failure, true);
}
startAll(threads);
joinAll(threads);
assertNull("concurrent alias creation failed " + failure.get(), failure.get());
}
private void joinAll(final CreateRoutedAliasThread[] threads) {
for (CreateRoutedAliasThread t : threads) {
try {
t.joinAndClose();
} catch (InterruptedException e) {
Thread.interrupted();
throw new RuntimeException(e);
}
}
}
private void startAll(final Thread[] threads) {
for (Thread t : threads) {
t.start();
}
}
private static class CreateRoutedAliasThread extends Thread {
final String aliasName;
protected final String start;
protected final SolrClient solrClient;
protected final AtomicReference<Exception> failure;
CreateRoutedAliasThread(
String name, String aliasName, String start, SolrClient solrClient,
AtomicReference<Exception> failure, boolean v2) {
super(name);
this.aliasName = aliasName;
this.start = start;
this.solrClient = solrClient;
this.failure = failure;
}
@Override
public void run() {
doWork();
}
void doWork() {
createAlias();
}
void addFailure(Exception e) {
log.error("Add Failure", e);
synchronized (failure) {
if (failure.get() != null) {
failure.get().addSuppressed(e);
} else {
failure.set(e);
}
}
}
private void createAlias() {
try {
CollectionAdminRequest.CreateTimeRoutedAlias rq = CollectionAdminRequest
.createTimeRoutedAlias(
aliasName,
start,
"+12HOUR",
"routedFoo_dt",
CollectionAdminRequest.createCollection("_ignored_", "_default", 1, 1)
);
final CollectionAdminResponse response = rq.process(solrClient);
if (response.getStatus() != 0) {
addFailure(new RuntimeException("failed to create collection " + aliasName));
}
} catch (Exception e) {
addFailure(e);
}
}
void joinAndClose() throws InterruptedException {
try {
super.join(60000);
} finally {
IOUtils.closeQuietly(solrClient);
}
}
}
}
|
SOLR-12386: Apply AwaitsFix to ConcurrentCreateRoutedAliasTest
|
solr/core/src/test/org/apache/solr/cloud/ConcurrentCreateRoutedAliasTest.java
|
SOLR-12386: Apply AwaitsFix to ConcurrentCreateRoutedAliasTest
|
|
Java
|
apache-2.0
|
b896e1aa09f46ee3b9de25b77ce62b6806bc171f
| 0
|
mrdomino/bazel,mbrukman/bazel,kchodorow/bazel,damienmg/bazel,spxtr/bazel,dslomov/bazel,davidzchen/bazel,werkt/bazel,perezd/bazel,hermione521/bazel,cushon/bazel,iamthearm/bazel,hermione521/bazel,mrdomino/bazel,snnn/bazel,hermione521/bazel,ulfjack/bazel,perezd/bazel,dropbox/bazel,variac/bazel,Asana/bazel,bazelbuild/bazel,hermione521/bazel,zhexuany/bazel,perezd/bazel,zhexuany/bazel,ulfjack/bazel,katre/bazel,akira-baruah/bazel,dropbox/bazel,meteorcloudy/bazel,perezd/bazel,mikelikespie/bazel,kchodorow/bazel,werkt/bazel,kchodorow/bazel,akira-baruah/bazel,LuminateWireless/bazel,UrbanCompass/bazel,iamthearm/bazel,damienmg/bazel,kchodorow/bazel-1,katre/bazel,kchodorow/bazel-1,aehlig/bazel,hermione521/bazel,mikelikespie/bazel,davidzchen/bazel,safarmer/bazel,mbrukman/bazel,katre/bazel,aehlig/bazel,juhalindfors/bazel-patches,aehlig/bazel,twitter-forks/bazel,cushon/bazel,LuminateWireless/bazel,safarmer/bazel,ulfjack/bazel,aehlig/bazel,akira-baruah/bazel,UrbanCompass/bazel,iamthearm/bazel,cushon/bazel,Asana/bazel,UrbanCompass/bazel,twitter-forks/bazel,safarmer/bazel,twitter-forks/bazel,kchodorow/bazel-1,bazelbuild/bazel,ButterflyNetwork/bazel,davidzchen/bazel,bazelbuild/bazel,ulfjack/bazel,Asana/bazel,LuminateWireless/bazel,dslomov/bazel-windows,cushon/bazel,spxtr/bazel,dropbox/bazel,juhalindfors/bazel-patches,kchodorow/bazel-1,ulfjack/bazel,meteorcloudy/bazel,ButterflyNetwork/bazel,mrdomino/bazel,spxtr/bazel,UrbanCompass/bazel,mrdomino/bazel,zhexuany/bazel,spxtr/bazel,werkt/bazel,mikelikespie/bazel,dropbox/bazel,Asana/bazel,dslomov/bazel-windows,snnn/bazel,twitter-forks/bazel,iamthearm/bazel,UrbanCompass/bazel,ulfjack/bazel,akira-baruah/bazel,juhalindfors/bazel-patches,damienmg/bazel,mbrukman/bazel,UrbanCompass/bazel,werkt/bazel,safarmer/bazel,dslomov/bazel,akira-baruah/bazel,davidzchen/bazel,iamthearm/bazel,dslomov/bazel,dslomov/bazel,meteorcloudy/bazel,meteorcloudy/bazel,Asana/bazel,bazelbuild/bazel,kchodorow/bazel,perezd/bazel,werkt/bazel,dslomov/bazel-windows,perezd/bazel,dslomov/bazel-windows,cushon/bazel,mrdomino/bazel,dslomov/bazel-windows,mbrukman/bazel,ButterflyNetwork/bazel,variac/bazel,davidzchen/bazel,damienmg/bazel,safarmer/bazel,dslomov/bazel,snnn/bazel,damienmg/bazel,juhalindfors/bazel-patches,dslomov/bazel-windows,damienmg/bazel,snnn/bazel,snnn/bazel,kchodorow/bazel,meteorcloudy/bazel,aehlig/bazel,aehlig/bazel,meteorcloudy/bazel,katre/bazel,bazelbuild/bazel,zhexuany/bazel,mikelikespie/bazel,safarmer/bazel,cushon/bazel,variac/bazel,davidzchen/bazel,davidzchen/bazel,aehlig/bazel,spxtr/bazel,LuminateWireless/bazel,dslomov/bazel,perezd/bazel,ButterflyNetwork/bazel,mikelikespie/bazel,twitter-forks/bazel,zhexuany/bazel,mbrukman/bazel,mbrukman/bazel,katre/bazel,twitter-forks/bazel,snnn/bazel,variac/bazel,hermione521/bazel,twitter-forks/bazel,ButterflyNetwork/bazel,akira-baruah/bazel,kchodorow/bazel,dropbox/bazel,dropbox/bazel,mikelikespie/bazel,ulfjack/bazel,kchodorow/bazel-1,Asana/bazel,variac/bazel,kchodorow/bazel-1,mrdomino/bazel,bazelbuild/bazel,damienmg/bazel,iamthearm/bazel,variac/bazel,katre/bazel,variac/bazel,spxtr/bazel,snnn/bazel,ButterflyNetwork/bazel,juhalindfors/bazel-patches,werkt/bazel,kchodorow/bazel,Asana/bazel,meteorcloudy/bazel,spxtr/bazel,dslomov/bazel,juhalindfors/bazel-patches,juhalindfors/bazel-patches,LuminateWireless/bazel,LuminateWireless/bazel,zhexuany/bazel
|
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import static com.android.SdkConstants.ANDROID_STYLE_RESOURCE_PREFIX;
import static com.android.SdkConstants.ANDROID_URI;
import static com.android.SdkConstants.ATTR_NAME;
import static com.android.SdkConstants.ATTR_PARENT;
import static com.android.SdkConstants.ATTR_TYPE;
import static com.android.SdkConstants.DOT_CLASS;
import static com.android.SdkConstants.DOT_GIF;
import static com.android.SdkConstants.DOT_JPEG;
import static com.android.SdkConstants.DOT_JPG;
import static com.android.SdkConstants.DOT_PNG;
import static com.android.SdkConstants.DOT_SVG;
import static com.android.SdkConstants.DOT_XML;
import static com.android.SdkConstants.FD_RES_VALUES;
import static com.android.SdkConstants.PREFIX_ANDROID;
import static com.android.SdkConstants.STYLE_RESOURCE_PREFIX;
import static com.android.SdkConstants.TAG_ITEM;
import static com.android.SdkConstants.TAG_RESOURCES;
import static com.android.SdkConstants.TAG_STYLE;
import static com.android.utils.SdkUtils.endsWith;
import static com.android.utils.SdkUtils.endsWithIgnoreCase;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.annotations.VisibleForTesting;
import com.android.ide.common.resources.ResourceUrl;
import com.android.ide.common.resources.configuration.DensityQualifier;
import com.android.ide.common.resources.configuration.FolderConfiguration;
import com.android.ide.common.resources.configuration.ResourceQualifier;
import com.android.ide.common.xml.XmlPrettyPrinter;
import com.android.resources.FolderTypeRelationship;
import com.android.resources.ResourceFolderType;
import com.android.resources.ResourceType;
import com.android.utils.XmlUtils;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.xml.parsers.ParserConfigurationException;
/**
* Class responsible for searching through a Gradle built tree (after resource merging, compilation
* and ProGuarding has been completed, but before final .apk assembly), which figures out which
* resources if any are unused, and removes them. <p> It does this by examining <ul> <li>The merged
* manifest, to find root resource references (such as drawables used for activity icons)</li>
* <li>The merged R class (to find the actual integer constants assigned to resources)</li> <li>The
 * ProGuard log files (to find the mapping from original symbol names to short names)</li> <li>The
* merged resources (to find which resources reference other resources, e.g. drawable state lists
* including other drawables, or layouts including other layouts, or styles referencing other
* drawables, or menus items including action layouts, etc.)</li> <li>The ProGuard output classes
* (to find resource references in code that are actually reachable)</li> </ul> From all this, it
* builds up a reference graph, and based on the root references (e.g. from the manifest and from
* the remaining code) it computes which resources are actually reachable in the app, and anything
* that is not reachable is then marked for deletion. <p> A resource is referenced in code if either
* the field R.type.name is referenced (which is the case for non-final resource references, e.g. in
* libraries), or if the corresponding int value is referenced (for final resource values). We check
* this by looking at the ProGuard output classes with an ASM visitor. One complication is that code
* can also call {@code Resources#getIdentifier(String,String,String)} where they can pass in the
* names of resources to look up. To handle this scenario, we use the ClassVisitor to see if there
* are any calls to the specific {@code Resources#getIdentifier} method. If not, great, the usage
* analysis is completely accurate. If we <b>do</b> find one, we check <b>all</b> the string
* constants found anywhere in the app, and look to see if any look relevant. For example, if we
* find the string "string/foo" or "my.pkg:string/foo", we will then mark the string resource named
* foo (if any) as potentially used. Similarly, if we find just "foo" or "/foo", we will mark
* <b>all</b> resources named "foo" as potentially used. However, if the string is "bar/foo" or "
* foo " these strings are ignored. This means we can potentially miss resources usages where the
* resource name is completed computed (e.g. by concatenating individual characters or taking
* substrings of strings that do not look like resource names), but that seems extremely unlikely to
* be a real-world scenario. <p> For now, for reasons detailed in the code, this only applies to
* file-based resources like layouts, menus and drawables, not value-based resources like strings
* and dimensions.
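 *
 * <p>A rough usage sketch (the variable names below are illustrative, not part of this class):
 * <pre>
 *   ResourceShrinker shrinker = new ResourceShrinker(
 *       resourcePackages, rTxt, classesJar, mergedManifest, mergedResourceDir, logFile);
 *   shrinker.shrink(destinationDir);
 * </pre>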
*/
public class ResourceShrinker {
public static final int TYPICAL_RESOURCE_COUNT = 200;
private final Set<String> resourcePackages;
private final Path rTxt;
private final Path classesJar;
private final Path mergedManifest;
private final Path mergedResourceDir;
private final Logger logger;
/**
* The computed set of unused resources
*/
private List<Resource> unused;
/**
* List of all known resources (parsed from R.java)
*/
private List<Resource> resources = Lists.newArrayListWithExpectedSize(TYPICAL_RESOURCE_COUNT);
/**
* Map from R field value to corresponding resource
*/
private Map<Integer, Resource> valueToResource =
Maps.newHashMapWithExpectedSize(TYPICAL_RESOURCE_COUNT);
/**
* Map from resource type to map from resource name to resource object
*/
private Map<ResourceType, Map<String, Resource>> typeToName =
Maps.newEnumMap(ResourceType.class);
/**
* Map from resource class owners (VM format class) to corresponding resource types. This will
* typically be the fully qualified names of the R classes, as well as any renamed versions of
* those discovered in the mapping.txt file from ProGuard
*/
private Map<String, ResourceType> resourceClassOwners = Maps.newHashMapWithExpectedSize(20);
public ResourceShrinker(
Set<String> resourcePackages,
@NonNull Path rTxt,
@NonNull Path classesJar,
@NonNull Path manifest,
@NonNull Path resources,
Path logFile) {
this.resourcePackages = resourcePackages;
this.rTxt = rTxt;
this.classesJar = classesJar;
this.mergedManifest = manifest;
this.mergedResourceDir = resources;
this.logger = Logger.getLogger(getClass().getName());
logger.setLevel(Level.FINE);
if (logFile != null) {
try {
FileHandler fileHandler = new FileHandler(logFile.toString());
fileHandler.setLevel(Level.FINE);
fileHandler.setFormatter(new Formatter(){
@Override public String format(LogRecord record) {
return record.getMessage() + "\n";
}
});
logger.addHandler(fileHandler);
} catch (SecurityException | IOException e) {
logger.warning(String.format("Unable to open '%s' to write log.", logFile));
}
}
}
public void shrink(Path destinationDir) throws IOException,
ParserConfigurationException, SAXException {
parseResourceTxtFile(rTxt, resourcePackages);
recordUsages(classesJar);
recordManifestUsages(mergedManifest);
recordResources(mergedResourceDir);
keepPossiblyReferencedResources();
dumpReferences();
findUnused();
removeUnused(destinationDir);
}
/**
* Remove resources (already identified by {@link #shrink(Path)}).
*
* <p>This task will copy all remaining used resources over from the full resource directory to a
* new reduced resource directory and removes unused values from all value xml files.
*
* @param destination directory to copy resources into; if null, delete resources in place
*/
private void removeUnused(Path destination) throws IOException,
ParserConfigurationException, SAXException {
    assert unused != null; // findUnused() (invoked from shrink()) must have run first
int resourceCount = unused.size() * 4; // *4: account for some resource folder repetition
Set<File> skip = Sets.newHashSetWithExpectedSize(resourceCount);
Set<File> rewrite = Sets.newHashSetWithExpectedSize(resourceCount);
Set<Resource> deleted = Sets.newHashSetWithExpectedSize(resourceCount);
for (Resource resource : unused) {
if (resource.declarations != null) {
for (File file : resource.declarations) {
String folder = file.getParentFile().getName();
ResourceFolderType folderType = ResourceFolderType.getFolderType(folder);
if (folderType != null && folderType != ResourceFolderType.VALUES) {
logger.fine("Deleted unused resource " + file);
assert skip != null;
skip.add(file);
deleted.add(resource);
} else {
// Can't delete values immediately; there can be many resources
// in this file, so we have to process them all
rewrite.add(file);
deleted.add(resource);
}
}
}
}
// Special case the base values.xml folder
File values = new File(mergedResourceDir.toFile(),
FD_RES_VALUES + File.separatorChar + "values.xml");
if (values.exists()) {
rewrite.add(values);
}
Map<File, String> rewritten = Maps.newHashMapWithExpectedSize(rewrite.size());
rewriteXml(rewrite, rewritten);
// TODO(apell): The graph traversal does not mark IDs as reachable or not, so they cannot be
// accurately removed from public.xml, but the declarations may be deleted if they occur in
// other files. IDs should be added to values.xml so that there are no definitions in public.xml
// without declarations.
createStubIds(values, rewritten);
File publicXml = new File(mergedResourceDir.toFile(),
FD_RES_VALUES + File.separatorChar + "public.xml");
trimPublicResources(publicXml, deleted, rewritten);
filteredCopy(mergedResourceDir.toFile(), destination, skip, rewritten);
}
/**
* Deletes unused resources from value XML files.
*/
private void rewriteXml(Set<File> rewrite, Map<File, String> rewritten)
throws IOException, ParserConfigurationException, SAXException {
// Delete value resources: Must rewrite the XML files
for (File file : rewrite) {
String xml = Files.toString(file, UTF_8);
Document document = XmlUtils.parseDocument(xml, true);
Element root = document.getDocumentElement();
if (root != null && TAG_RESOURCES.equals(root.getTagName())) {
List<String> removed = Lists.newArrayList();
stripUnused(root, removed);
logger.fine("Removed " + removed.size() + " unused resources from " + file + ":\n "
+ Joiner.on(", ").join(removed));
String formatted = XmlPrettyPrinter.prettyPrint(document, xml.endsWith("\n"));
rewritten.put(file, formatted);
}
}
}
/**
* Write stub values for IDs to values.xml to match those available in public.xml.
*/
private void createStubIds(File values, Map<File, String> rewritten)
throws IOException, ParserConfigurationException, SAXException {
if (values.exists()) {
String xml = rewritten.get(values);
if (xml == null) {
xml = Files.toString(values, UTF_8);
}
Document document = XmlUtils.parseDocument(xml, true);
Element root = document.getDocumentElement();
for (Resource resource : resources) {
if (resource.type == ResourceType.ID && !resource.hasDefault) {
Element item = document.createElement(TAG_ITEM);
item.setAttribute(ATTR_TYPE, resource.type.getName());
item.setAttribute(ATTR_NAME, resource.name);
root.appendChild(item);
}
}
String formatted = XmlPrettyPrinter.prettyPrint(document, xml.endsWith("\n"));
rewritten.put(values, formatted);
}
}
/**
* Remove public definitions of unused resources.
*/
private void trimPublicResources(File publicXml, Set<Resource> deleted,
Map<File, String> rewritten) throws IOException, ParserConfigurationException, SAXException {
if (publicXml.exists()) {
String xml = rewritten.get(publicXml);
if (xml == null) {
xml = Files.toString(publicXml, UTF_8);
}
Document document = XmlUtils.parseDocument(xml, true);
Element root = document.getDocumentElement();
if (root != null && TAG_RESOURCES.equals(root.getTagName())) {
NodeList children = root.getChildNodes();
for (int i = children.getLength() - 1; i >= 0; i--) {
Node child = children.item(i);
if (child.getNodeType() == Node.ELEMENT_NODE) {
Element resourceElement = (Element) child;
ResourceType type = ResourceType.getEnum(resourceElement.getAttribute(ATTR_TYPE));
String name = resourceElement.getAttribute(ATTR_NAME);
if (type != null && name != null) {
Resource resource = getResource(type, name);
if (resource != null && deleted.contains(resource)) {
root.removeChild(child);
}
}
}
}
}
String formatted = XmlPrettyPrinter.prettyPrint(document, xml.endsWith("\n"));
rewritten.put(publicXml, formatted);
}
}
/**
* Copies one resource directory tree into another; skipping some files, replacing the contents of
* some, and passing everything else through unmodified
*/
private static void filteredCopy(File source, Path destination, Set<File> skip,
Map<File, String> replace) throws IOException {
File destinationFile = destination.toFile();
if (source.isDirectory()) {
File[] children = source.listFiles();
if (children != null) {
if (!destinationFile.exists()) {
boolean success = destinationFile.mkdirs();
if (!success) {
throw new IOException("Could not create " + destination);
}
}
for (File child : children) {
filteredCopy(child, destination.resolve(child.getName()), skip, replace);
}
}
} else if (!skip.contains(source) && source.isFile()) {
String contents = replace.get(source);
if (contents != null) {
Files.write(contents, destinationFile, UTF_8);
} else {
Files.copy(source, destinationFile);
}
}
}
private void stripUnused(Element element, List<String> removed) {
ResourceType type = getResourceType(element);
if (type == ResourceType.ATTR) {
// Not yet properly handled
return;
}
Resource resource = getResource(element);
if (resource != null) {
if (resource.type == ResourceType.DECLARE_STYLEABLE
|| resource.type == ResourceType.ATTR) {
// Don't strip children of declare-styleable; we're not correctly
// tracking field references of the R_styleable_attr fields yet
return;
}
if (!resource.reachable
&& (resource.type == ResourceType.STYLE
|| resource.type == ResourceType.PLURALS
|| resource.type == ResourceType.ARRAY)) {
NodeList children = element.getChildNodes();
for (int i = children.getLength() - 1; i >= 0; i--) {
Node child = children.item(i);
element.removeChild(child);
}
return;
}
}
NodeList children = element.getChildNodes();
for (int i = children.getLength() - 1; i >= 0; i--) {
Node child = children.item(i);
if (child.getNodeType() == Node.ELEMENT_NODE) {
stripUnused((Element) child, removed);
}
}
if (resource != null && !resource.reachable && resource.isRelevantType()) {
removed.add(resource.getUrl());
Node parent = element.getParentNode();
parent.removeChild(element);
}
}
private static String getFieldName(Element element) {
return getFieldName(element.getAttribute(ATTR_NAME));
}
@Nullable
private Resource getResource(Element element) {
ResourceType type = getResourceType(element);
if (type != null) {
String name = getFieldName(element);
return getResource(type, name);
}
return null;
}
private static ResourceType getResourceType(Element element) {
String tagName = element.getTagName();
switch (tagName) {
case TAG_ITEM:
String typeName = element.getAttribute(ATTR_TYPE);
if (!typeName.isEmpty()) {
return ResourceType.getEnum(typeName);
}
break;
case "string-array":
case "integer-array":
return ResourceType.ARRAY;
default:
return ResourceType.getEnum(tagName);
}
return null;
}
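  /**
   * Computes {@link #unused}: starting from resources already marked reachable (the roots found
   * in the manifest and in code), walks the reference graph and collects every relevant resource
   * that was never reached.
   */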
private void findUnused() {
List<Resource> roots = Lists.newArrayList();
for (Resource resource : resources) {
if (resource.reachable && resource.type != ResourceType.ID
&& resource.type != ResourceType.ATTR) {
roots.add(resource);
}
}
logger.fine(String.format("The root reachable resources are:\n %s",
Joiner.on(",\n ").join(roots)));
Map<Resource, Boolean> seen = new IdentityHashMap<>(resources.size());
for (Resource root : roots) {
visit(root, seen);
}
List<Resource> unused = Lists.newArrayListWithExpectedSize(resources.size());
for (Resource resource : resources) {
if (!resource.reachable && resource.isRelevantType()) {
unused.add(resource);
}
}
this.unused = unused;
}
private static void visit(Resource root, Map<Resource, Boolean> seen) {
if (seen.containsKey(root)) {
return;
}
seen.put(root, Boolean.TRUE);
root.reachable = true;
if (root.references != null) {
for (Resource referenced : root.references) {
visit(referenced, seen);
}
}
}
private void dumpReferences() {
for (Resource resource : resources) {
if (resource.references != null) {
logger.fine(resource + " => " + resource.references);
}
}
}
private void keepPossiblyReferencedResources() {
if (!mFoundGetIdentifier || mStrings == null) {
// No calls to android.content.res.Resources#getIdentifier; no need
// to worry about string references to resources
return;
}
List<String> strings = new ArrayList<String>(mStrings);
Collections.sort(strings);
logger.fine(String.format("android.content.res.Resources#getIdentifier present: %s",
mFoundGetIdentifier));
logger.fine("Referenced Strings:");
for (String s : strings) {
s = s.trim().replace("\n", "\\n");
if (s.length() > 40) {
s = s.substring(0, 37) + "...";
} else if (s.isEmpty()) {
continue;
}
logger.fine(" " + s);
}
Set<String> names = Sets.newHashSetWithExpectedSize(50);
for (Map<String, Resource> map : typeToName.values()) {
names.addAll(map.keySet());
}
for (String string : mStrings) {
// Check whether the string looks relevant
// We consider three types of strings:
// (1) simple resource names, e.g. "foo" from @layout/foo
// These might be the parameter to a getIdentifier() call, or could
// be composed into a fully qualified resource name for the getIdentifier()
// method. We match these for *all* resource types.
// (2) Relative source names, e.g. layout/foo, from @layout/foo
// These might be composed into a fully qualified resource name for
// getIdentifier().
// (3) Fully qualified resource names of the form package:type/name.
int n = string.length();
boolean justName = true;
boolean haveSlash = false;
for (int i = 0; i < n; i++) {
char c = string.charAt(i);
if (c == '/') {
haveSlash = true;
justName = false;
} else if (c == '.' || c == ':') {
justName = false;
} else if (!Character.isJavaIdentifierPart(c)) {
// This shouldn't happen; we've filtered out these strings in
// the {@link #referencedString} method
assert false : string;
break;
}
}
String name;
if (justName) {
// Check name (below)
name = string;
} else if (!haveSlash) {
// If we have more than just a symbol name, we expect to also see a slash
//noinspection UnnecessaryContinue
continue;
} else {
// Try to pick out the resource name pieces; if we can find the
// resource type unambiguously; if not, just match on names
int slash = string.indexOf('/');
assert slash != -1; // checked with haveSlash above
name = string.substring(slash + 1);
if (name.isEmpty() || !names.contains(name)) {
continue;
}
        // See if we have a known specific resource type
if (slash > 0) {
int colon = string.indexOf(':');
String typeName = string.substring(colon != -1 ? colon + 1 : 0, slash);
ResourceType type = ResourceType.getEnum(typeName);
if (type == null) {
continue;
}
Resource resource = getResource(type, name);
if (resource != null) {
logger.fine("Marking " + resource + " used because it "
+ "matches string pool constant " + string);
}
markReachable(resource);
continue;
}
// fall through and check the name
}
if (names.contains(name)) {
for (Map<String, Resource> map : typeToName.values()) {
          Resource resource = map.get(name); // look up by the bare name (string may carry a leading '/')
if (resource != null) {
logger.fine("Marking " + resource + " used because it "
+ "matches string pool constant " + string);
}
markReachable(resource);
}
} else if (Character.isDigit(name.charAt(0))) {
// Just a number? There are cases where it calls getIdentifier by
// a String number; see for example SuggestionsAdapter in the support
// library which reports supporting a string like "2130837524" and
// "android.resource://com.android.alarmclock/2130837524".
try {
int id = Integer.parseInt(name);
if (id != 0) {
markReachable(valueToResource.get(id));
}
} catch (NumberFormatException e) {
// pass
}
}
}
}
private void recordResources(Path resDir)
throws IOException, SAXException, ParserConfigurationException {
File[] resourceFolders = resDir.toFile().listFiles();
if (resourceFolders != null) {
for (File folder : resourceFolders) {
ResourceFolderType folderType = ResourceFolderType.getFolderType(folder.getName());
if (folderType != null) {
recordResources(folderType, folder);
}
}
}
}
private void recordResources(@NonNull ResourceFolderType folderType, File folder)
throws ParserConfigurationException, SAXException, IOException {
File[] files = folder.listFiles();
FolderConfiguration config = FolderConfiguration.getConfigForFolder(folder.getName());
boolean isDefaultFolder = false;
if (config != null) {
isDefaultFolder = true;
for (int i = 0, n = FolderConfiguration.getQualifierCount(); i < n; i++) {
ResourceQualifier qualifier = config.getQualifier(i);
// Densities are special: even if they're present in just (say) drawable-hdpi
// we'll match it on any other density
if (qualifier != null && !(qualifier instanceof DensityQualifier)) {
isDefaultFolder = false;
break;
}
}
}
if (files != null) {
for (File file : files) {
String path = file.getPath();
boolean isXml = endsWithIgnoreCase(path, DOT_XML);
Resource from = null;
// Record resource for the whole file
if (folderType != ResourceFolderType.VALUES
&& (isXml
|| endsWith(path, DOT_PNG) //also true for endsWith(name, DOT_9PNG)
|| endsWith(path, DOT_JPG)
|| endsWith(path, DOT_GIF)
|| endsWith(path, DOT_JPEG)
|| endsWith(path, DOT_SVG))) {
List<ResourceType> types = FolderTypeRelationship.getRelatedResourceTypes(
folderType);
ResourceType type = types.get(0);
assert type != ResourceType.ID : folderType;
String name = file.getName();
name = name.substring(0, name.indexOf('.'));
Resource resource = getResource(type, name);
if (resource != null) {
resource.addLocation(file);
if (isDefaultFolder) {
resource.hasDefault = true;
}
from = resource;
}
}
if (isXml) {
// For value files, and drawables and colors etc also pull in resource
// references inside the file
recordResourcesUsages(file, isDefaultFolder, from);
}
}
}
}
private void recordManifestUsages(Path manifest)
throws IOException, ParserConfigurationException, SAXException {
String xml = Files.toString(manifest.toFile(), UTF_8);
Document document = XmlUtils.parseDocument(xml, true);
recordManifestUsages(document.getDocumentElement());
}
private void recordResourcesUsages(@NonNull File file, boolean isDefaultFolder,
@Nullable Resource from)
throws IOException, ParserConfigurationException, SAXException {
String xml = Files.toString(file, UTF_8);
Document document = XmlUtils.parseDocument(xml, true);
recordResourceReferences(file, isDefaultFolder, document.getDocumentElement(), from);
}
@Nullable
private Resource getResource(@NonNull ResourceType type, @NonNull String name) {
Map<String, Resource> nameMap = typeToName.get(type);
if (nameMap != null) {
return nameMap.get(getFieldName(name));
}
return null;
}
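  /**
   * Looks up a resource from a possible {@code @type/name} url reference; returns null for
   * framework references and for strings that are not resource urls.
   */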
@Nullable
private Resource getResource(@NonNull String possibleUrlReference) {
ResourceUrl url = ResourceUrl.parse(possibleUrlReference);
if (url != null && !url.framework) {
return getResource(url.type, url.name);
}
return null;
}
private void recordManifestUsages(Node node) {
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE) {
Element element = (Element) node;
NamedNodeMap attributes = element.getAttributes();
for (int i = 0, n = attributes.getLength(); i < n; i++) {
Attr attr = (Attr) attributes.item(i);
markReachable(getResource(attr.getValue()));
}
} else if (nodeType == Node.TEXT_NODE) {
// Does this apply to any manifests??
String text = node.getNodeValue().trim();
markReachable(getResource(text));
}
NodeList children = node.getChildNodes();
for (int i = 0, n = children.getLength(); i < n; i++) {
Node child = children.item(i);
recordManifestUsages(child);
}
}
private void recordResourceReferences(@NonNull File file, boolean isDefaultFolder,
@NonNull Node node, @Nullable Resource from) {
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE) {
Element element = (Element) node;
if (from != null) {
NamedNodeMap attributes = element.getAttributes();
for (int i = 0, n = attributes.getLength(); i < n; i++) {
Attr attr = (Attr) attributes.item(i);
Resource resource = getResource(attr.getValue());
if (resource != null) {
from.addReference(resource);
}
}
// Android Wear. We *could* limit ourselves to only doing this in files
// referenced from a manifest meta-data element, e.g.
// <meta-data android:name="com.google.android.wearable.beta.app"
// android:resource="@xml/wearable_app_desc"/>
// but given that that property has "beta" in the name, it seems likely
// to change and therefore hardcoding it for that key risks breakage
// in the future.
if ("rawPathResId".equals(element.getTagName())) {
StringBuilder sb = new StringBuilder();
NodeList children = node.getChildNodes();
for (int i = 0, n = children.getLength(); i < n; i++) {
Node child = children.item(i);
if (child.getNodeType() == Element.TEXT_NODE
|| child.getNodeType() == Element.CDATA_SECTION_NODE) {
sb.append(child.getNodeValue());
}
}
if (sb.length() > 0) {
Resource resource = getResource(ResourceType.RAW, sb.toString().trim());
from.addReference(resource);
}
}
}
Resource definition = getResource(element);
if (definition != null) {
from = definition;
definition.addLocation(file);
if (isDefaultFolder) {
definition.hasDefault = true;
}
}
String tagName = element.getTagName();
if (TAG_STYLE.equals(tagName)) {
if (element.hasAttribute(ATTR_PARENT)) {
String parent = element.getAttribute(ATTR_PARENT);
if (!parent.isEmpty() && !parent.startsWith(ANDROID_STYLE_RESOURCE_PREFIX)
&& !parent.startsWith(PREFIX_ANDROID)) {
String parentStyle = parent;
if (!parentStyle.startsWith(STYLE_RESOURCE_PREFIX)) {
parentStyle = STYLE_RESOURCE_PREFIX + parentStyle;
}
Resource ps = getResource(getFieldName(parentStyle));
if (ps != null && definition != null) {
definition.addReference(ps);
}
}
} else {
// Implicit parent styles by name
String name = getFieldName(element);
while (true) {
int index = name.lastIndexOf('_');
if (index != -1) {
name = name.substring(0, index);
Resource ps = getResource(STYLE_RESOURCE_PREFIX + getFieldName(name));
if (ps != null && definition != null) {
definition.addReference(ps);
}
} else {
break;
}
}
}
}
if (TAG_ITEM.equals(tagName)) {
// In style? If so the name: attribute can be a reference
if (element.getParentNode() != null
&& element.getParentNode().getNodeName().equals(TAG_STYLE)) {
String name = element.getAttributeNS(ANDROID_URI, ATTR_NAME);
if (!name.isEmpty() && !name.startsWith("android:")) {
Resource resource = getResource(ResourceType.ATTR, name);
if (definition == null) {
Element style = (Element) element.getParentNode();
definition = getResource(style);
if (definition != null) {
from = definition;
definition.addReference(resource);
}
}
}
}
}
} else if (nodeType == Node.TEXT_NODE || nodeType == Node.CDATA_SECTION_NODE) {
String text = node.getNodeValue().trim();
Resource textResource = getResource(getFieldName(text));
if (textResource != null && from != null) {
from.addReference(textResource);
}
}
NodeList children = node.getChildNodes();
for (int i = 0, n = children.getLength(); i < n; i++) {
Node child = children.item(i);
recordResourceReferences(file, isDefaultFolder, child, from);
}
}
public static String getFieldName(@NonNull String styleName) {
return styleName.replace('.', '_').replace('-', '_').replace(':', '_');
}
private static void markReachable(@Nullable Resource resource) {
if (resource != null) {
resource.reachable = true;
}
}
private Set<String> mStrings;
private boolean mFoundGetIdentifier;
private void referencedString(@NonNull String string) {
    // See if the string is at all eligible; ignore strings that contain anything other than java
    // identifier chars and the separators . : and /, or that are empty or too long.
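    // For example, "foo", "layout/foo" and "my.pkg:layout/foo" are recorded, while "", strings
    // containing spaces, and strings longer than 80 characters are ignored.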
if (string.isEmpty() || string.length() > 80) {
return;
}
boolean haveIdentifierChar = false;
for (int i = 0, n = string.length(); i < n; i++) {
char c = string.charAt(i);
boolean identifierChar = Character.isJavaIdentifierPart(c);
if (!identifierChar && c != '.' && c != ':' && c != '/') {
        // .:/ are for the fully qualified resource names
return;
} else if (identifierChar) {
haveIdentifierChar = true;
}
}
if (!haveIdentifierChar) {
return;
}
if (mStrings == null) {
mStrings = Sets.newHashSetWithExpectedSize(300);
}
mStrings.add(string);
}
private void recordUsages(Path jarFile) throws IOException {
if (!jarFile.toFile().exists()) {
return;
}
ZipInputStream zis = null;
try {
FileInputStream fis = new FileInputStream(jarFile.toFile());
try {
zis = new ZipInputStream(fis);
ZipEntry entry = zis.getNextEntry();
while (entry != null) {
String name = entry.getName();
if (name.endsWith(DOT_CLASS)) {
byte[] bytes = ByteStreams.toByteArray(zis);
if (bytes != null) {
ClassReader classReader = new ClassReader(bytes);
classReader.accept(new UsageVisitor(), 0);
}
}
entry = zis.getNextEntry();
}
} finally {
Closeables.close(fis, true);
}
} finally {
Closeables.close(zis, true);
}
}
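  // Each R.txt line is expected to look roughly like "int drawable foo 0x7f020001" (single-valued
  // entries) or "int[] styleable MyStyleable { 0x7f010001, 0x7f010002 }" (styleable arrays); the
  // example ids are illustrative. tokens[1] is the resource type, tokens[2] the name, and for
  // single-valued entries tokens[3] is the id.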
private void parseResourceTxtFile(Path rTxt, Set<String> resourcePackages) throws IOException {
    try (BufferedReader reader = java.nio.file.Files.newBufferedReader(rTxt, UTF_8)) {
      String line;
      while ((line = reader.readLine()) != null) {
        String[] tokens = line.split(" ");
        ResourceType type = ResourceType.getEnum(tokens[1]);
        for (String resourcePackage : resourcePackages) {
          resourceClassOwners.put(resourcePackage.replace('.', '/') + "/R$" + type.getName(), type);
        }
        if (type == ResourceType.STYLEABLE) {
          if (tokens[0].equals("int[]")) {
            addResource(ResourceType.DECLARE_STYLEABLE, tokens[2], null);
          } else {
            // TODO(jongerrish): Implement stripping of styleables.
          }
        } else {
          addResource(type, tokens[2], tokens[3]);
        }
      }
    }
}
private void addResource(@NonNull ResourceType type, @NonNull String name,
@Nullable String value) {
int realValue = value != null ? Integer.decode(value) : -1;
Resource resource = getResource(type, name);
if (resource != null) {
//noinspection VariableNotUsedInsideIf
if (value != null) {
if (resource.value == -1) {
resource.value = realValue;
} else {
assert realValue == resource.value;
}
}
return;
}
resource = new Resource(type, name, realValue);
resources.add(resource);
if (realValue != -1) {
valueToResource.put(realValue, resource);
}
Map<String, Resource> nameMap = typeToName.get(type);
if (nameMap == null) {
nameMap = Maps.newHashMapWithExpectedSize(30);
typeToName.put(type, nameMap);
}
nameMap.put(name, resource);
// TODO: Assert that we don't set the same resource multiple times to different values.
// Could happen if you pass in stale data!
}
@VisibleForTesting
List<Resource> getAllResources() {
return resources;
}
/**
* Metadata about an Android resource
*/
public static class Resource {
/**
* Type of resource
*/
public ResourceType type;
/**
* Name of resource
*/
public String name;
/**
* Integer id location
*/
public int value;
/**
* Whether this resource can be reached from one of the roots (manifest, code)
*/
public boolean reachable;
/**
* Whether this resource has a default definition (e.g. present in a resource folder with no
* qualifiers). For id references, an inline definition (@+id) does not count as a default
* definition.
*/
public boolean hasDefault;
/**
* Resources this resource references. For example, a layout can reference another via an
* include; a style reference in a layout references that layout style, and so on.
*/
public List<Resource> references;
public final List<File> declarations = Lists.newArrayList();
private Resource(ResourceType type, String name, int value) {
this.type = type;
this.name = name;
this.value = value;
}
@Override
public String toString() {
return type + ":" + name + ":" + value;
}
@SuppressWarnings("RedundantIfStatement") // Generated by IDE
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Resource resource = (Resource) o;
if (name != null ? !name.equals(resource.name) : resource.name != null) {
return false;
}
if (type != resource.type) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = type != null ? type.hashCode() : 0;
result = 31 * result + (name != null ? name.hashCode() : 0);
return result;
}
public void addLocation(@NonNull File file) {
declarations.add(file);
}
public void addReference(@Nullable Resource resource) {
if (resource != null) {
if (references == null) {
references = Lists.newArrayList();
} else if (references.contains(resource)) {
return;
}
references.add(resource);
}
}
public String getUrl() {
return '@' + type.getName() + '/' + name;
}
public boolean isRelevantType() {
return type != ResourceType.ID; // && getFolderType() != ResourceFolderType.VALUES;
}
}
private class UsageVisitor extends ClassVisitor {
public UsageVisitor() {
super(Opcodes.ASM5);
}
@Override
public MethodVisitor visitMethod(int access, final String name,
String desc, String signature, String[] exceptions) {
return new MethodVisitor(Opcodes.ASM5) {
@Override
public void visitLdcInsn(Object cst) {
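// An integer constant may be an inlined resource id; a string constant may be a
// resource name passed to Resources#getIdentifier, so collect it for later matching.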
if (cst instanceof Integer) {
Integer value = (Integer) cst;
markReachable(valueToResource.get(value));
} else if (cst instanceof String) {
String string = (String) cst;
referencedString(string);
}
}
@Override
public void visitFieldInsn(int opcode, String owner, String name, String desc) {
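// A GETSTATIC on a field of an R$<type> class is a direct reference to R.<type>.<name>.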
if (opcode == Opcodes.GETSTATIC) {
ResourceType type = resourceClassOwners.get(owner);
if (type != null) {
Resource resource = getResource(type, name);
if (resource != null) {
markReachable(resource);
}
}
}
}
@Override
public void visitMethodInsn(int opcode, String owner, String name, String desc) {
super.visitMethodInsn(opcode, owner, name, desc);
if (owner.equals("android/content/res/Resources")
&& name.equals("getIdentifier")
&& desc.equals(
"(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)I")) {
mFoundGetIdentifier = true;
// TODO: Check previous instruction and see if we can find a literal
// String; if so, we can more accurately dispatch the resource here
// rather than having to check the whole string pool!
}
}
};
}
}
}
|
src/tools/android/java/com/google/devtools/build/android/ResourceShrinker.java
|
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android;
import static com.android.SdkConstants.ANDROID_STYLE_RESOURCE_PREFIX;
import static com.android.SdkConstants.ANDROID_URI;
import static com.android.SdkConstants.ATTR_NAME;
import static com.android.SdkConstants.ATTR_PARENT;
import static com.android.SdkConstants.ATTR_TYPE;
import static com.android.SdkConstants.DOT_CLASS;
import static com.android.SdkConstants.DOT_GIF;
import static com.android.SdkConstants.DOT_JPEG;
import static com.android.SdkConstants.DOT_JPG;
import static com.android.SdkConstants.DOT_PNG;
import static com.android.SdkConstants.DOT_XML;
import static com.android.SdkConstants.FD_RES_VALUES;
import static com.android.SdkConstants.PREFIX_ANDROID;
import static com.android.SdkConstants.STYLE_RESOURCE_PREFIX;
import static com.android.SdkConstants.TAG_ITEM;
import static com.android.SdkConstants.TAG_RESOURCES;
import static com.android.SdkConstants.TAG_STYLE;
import static com.android.utils.SdkUtils.endsWith;
import static com.android.utils.SdkUtils.endsWithIgnoreCase;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
import com.android.annotations.NonNull;
import com.android.annotations.Nullable;
import com.android.annotations.VisibleForTesting;
import com.android.ide.common.resources.ResourceUrl;
import com.android.ide.common.resources.configuration.DensityQualifier;
import com.android.ide.common.resources.configuration.FolderConfiguration;
import com.android.ide.common.resources.configuration.ResourceQualifier;
import com.android.ide.common.xml.XmlPrettyPrinter;
import com.android.resources.FolderTypeRelationship;
import com.android.resources.ResourceFolderType;
import com.android.resources.ResourceType;
import com.android.utils.XmlUtils;
import org.objectweb.asm.ClassReader;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.xml.parsers.ParserConfigurationException;
/**
* Class responsible for searching through a Gradle built tree (after resource merging, compilation
* and ProGuarding has been completed, but before final .apk assembly), which figures out which
* resources if any are unused, and removes them. <p> It does this by examining <ul> <li>The merged
* manifest, to find root resource references (such as drawables used for activity icons)</li>
* <li>The merged R class (to find the actual integer constants assigned to resources)</li> <li>The
* ProGuard log files (to find the mapping from original symbol names to short names)</li> <li>The
* merged resources (to find which resources reference other resources, e.g. drawable state lists
* including other drawables, or layouts including other layouts, or styles referencing other
* drawables, or menus items including action layouts, etc.)</li> <li>The ProGuard output classes
* (to find resource references in code that are actually reachable)</li> </ul> From all this, it
* builds up a reference graph, and based on the root references (e.g. from the manifest and from
* the remaining code) it computes which resources are actually reachable in the app, and anything
* that is not reachable is then marked for deletion. <p> A resource is referenced in code if either
* the field R.type.name is referenced (which is the case for non-final resource references, e.g. in
* libraries), or if the corresponding int value is referenced (for final resource values). We check
* this by looking at the ProGuard output classes with an ASM visitor. One complication is that code
* can also call {@code Resources#getIdentifier(String,String,String)} where they can pass in the
* names of resources to look up. To handle this scenario, we use the ClassVisitor to see if there
* are any calls to the specific {@code Resources#getIdentifier} method. If not, great, the usage
* analysis is completely accurate. If we <b>do</b> find one, we check <b>all</b> the string
* constants found anywhere in the app, and look to see if any look relevant. For example, if we
* find the string "string/foo" or "my.pkg:string/foo", we will then mark the string resource named
* foo (if any) as potentially used. Similarly, if we find just "foo" or "/foo", we will mark
* <b>all</b> resources named "foo" as potentially used. However, if the string is "bar/foo" or "
* foo " these strings are ignored. This means we can potentially miss resources usages where the
* resource name is completed computed (e.g. by concatenating individual characters or taking
* substrings of strings that do not look like resource names), but that seems extremely unlikely to
* be a real-world scenario. <p> For now, for reasons detailed in the code, this only applies to
* file-based resources like layouts, menus and drawables, not value-based resources like strings
* and dimensions.
*/
public class ResourceShrinker {
public static final int TYPICAL_RESOURCE_COUNT = 200;
private final Set<String> resourcePackages;
private final Path rTxt;
private final Path classesJar;
private final Path mergedManifest;
private final Path mergedResourceDir;
private final Logger logger;
/**
* The computed set of unused resources
*/
private List<Resource> unused;
/**
* List of all known resources (parsed from R.java)
*/
private List<Resource> resources = Lists.newArrayListWithExpectedSize(TYPICAL_RESOURCE_COUNT);
/**
* Map from R field value to corresponding resource
*/
private Map<Integer, Resource> valueToResource =
Maps.newHashMapWithExpectedSize(TYPICAL_RESOURCE_COUNT);
/**
* Map from resource type to map from resource name to resource object
*/
private Map<ResourceType, Map<String, Resource>> typeToName =
Maps.newEnumMap(ResourceType.class);
/**
* Map from resource class owners (VM format class) to corresponding resource types. This will
* typically be the fully qualified names of the R classes, as well as any renamed versions of
* those discovered in the mapping.txt file from ProGuard
*/
private Map<String, ResourceType> resourceClassOwners = Maps.newHashMapWithExpectedSize(20);
public ResourceShrinker(
Set<String> resourcePackages,
@NonNull Path rTxt,
@NonNull Path classesJar,
@NonNull Path manifest,
@NonNull Path resources,
Path logFile) {
this.resourcePackages = resourcePackages;
this.rTxt = rTxt;
this.classesJar = classesJar;
this.mergedManifest = manifest;
this.mergedResourceDir = resources;
this.logger = Logger.getLogger(getClass().getName());
logger.setLevel(Level.FINE);
if (logFile != null) {
try {
FileHandler fileHandler = new FileHandler(logFile.toString());
fileHandler.setLevel(Level.FINE);
fileHandler.setFormatter(new Formatter(){
@Override public String format(LogRecord record) {
return record.getMessage() + "\n";
}
});
logger.addHandler(fileHandler);
} catch (SecurityException | IOException e) {
logger.warning(String.format("Unable to open '%s' to write log.", logFile));
}
}
}
public void shrink(Path destinationDir) throws IOException,
ParserConfigurationException, SAXException {
parseResourceTxtFile(rTxt, resourcePackages);
recordUsages(classesJar);
recordManifestUsages(mergedManifest);
recordResources(mergedResourceDir);
keepPossiblyReferencedResources();
dumpReferences();
findUnused();
removeUnused(destinationDir);
}
/**
* Remove resources (already identified by {@link #shrink(Path)}).
*
* <p>This task will copy all remaining used resources over from the full resource directory to a
* new reduced resource directory and removes unused values from all value xml files.
*
* @param destination directory to copy resources into; if null, delete resources in place
*/
private void removeUnused(Path destination) throws IOException,
ParserConfigurationException, SAXException {
assert unused != null; // findUnused() must have been called first (via shrink())
int resourceCount = unused.size() * 4; // *4: account for some resource folder repetition
Set<File> skip = Sets.newHashSetWithExpectedSize(resourceCount);
Set<File> rewrite = Sets.newHashSetWithExpectedSize(resourceCount);
Set<Resource> deleted = Sets.newHashSetWithExpectedSize(resourceCount);
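// skip: resource files to omit from the copy entirely; rewrite: value XML files that must be
// rewritten to drop individual entries; deleted: the resources actually removed.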
for (Resource resource : unused) {
if (resource.declarations != null) {
for (File file : resource.declarations) {
String folder = file.getParentFile().getName();
ResourceFolderType folderType = ResourceFolderType.getFolderType(folder);
if (folderType != null && folderType != ResourceFolderType.VALUES) {
logger.fine("Deleted unused resource " + file);
assert skip != null;
skip.add(file);
deleted.add(resource);
} else {
// Can't delete values immediately; there can be many resources
// in this file, so we have to process them all
rewrite.add(file);
deleted.add(resource);
}
}
}
}
// Special case the base values.xml folder
File values = new File(mergedResourceDir.toFile(),
FD_RES_VALUES + File.separatorChar + "values.xml");
if (values.exists()) {
rewrite.add(values);
}
Map<File, String> rewritten = Maps.newHashMapWithExpectedSize(rewrite.size());
rewriteXml(rewrite, rewritten);
// TODO(apell): The graph traversal does not mark IDs as reachable or not, so they cannot be
// accurately removed from public.xml, but the declarations may be deleted if they occur in
// other files. IDs should be added to values.xml so that there are no definitions in public.xml
// without declarations.
createStubIds(values, rewritten);
File publicXml = new File(mergedResourceDir.toFile(),
FD_RES_VALUES + File.separatorChar + "public.xml");
trimPublicResources(publicXml, deleted, rewritten);
filteredCopy(mergedResourceDir.toFile(), destination, skip, rewritten);
}
/**
* Deletes unused resources from value XML files.
*/
private void rewriteXml(Set<File> rewrite, Map<File, String> rewritten)
throws IOException, ParserConfigurationException, SAXException {
// Delete value resources: Must rewrite the XML files
for (File file : rewrite) {
String xml = Files.toString(file, UTF_8);
Document document = XmlUtils.parseDocument(xml, true);
Element root = document.getDocumentElement();
if (root != null && TAG_RESOURCES.equals(root.getTagName())) {
List<String> removed = Lists.newArrayList();
stripUnused(root, removed);
logger.fine("Removed " + removed.size() + " unused resources from " + file + ":\n "
+ Joiner.on(", ").join(removed));
String formatted = XmlPrettyPrinter.prettyPrint(document, xml.endsWith("\n"));
rewritten.put(file, formatted);
}
}
}
/**
* Write stub values for IDs to values.xml to match those available in public.xml.
*/
private void createStubIds(File values, Map<File, String> rewritten)
throws IOException, ParserConfigurationException, SAXException {
if (values.exists()) {
String xml = rewritten.get(values);
if (xml == null) {
xml = Files.toString(values, UTF_8);
}
Document document = XmlUtils.parseDocument(xml, true);
Element root = document.getDocumentElement();
for (Resource resource : resources) {
if (resource.type == ResourceType.ID && !resource.hasDefault) {
Element item = document.createElement(TAG_ITEM);
item.setAttribute(ATTR_TYPE, resource.type.getName());
item.setAttribute(ATTR_NAME, resource.name);
root.appendChild(item);
}
}
String formatted = XmlPrettyPrinter.prettyPrint(document, xml.endsWith("\n"));
rewritten.put(values, formatted);
}
}
/**
* Remove public definitions of unused resources.
*/
private void trimPublicResources(File publicXml, Set<Resource> deleted,
Map<File, String> rewritten) throws IOException, ParserConfigurationException, SAXException {
if (publicXml.exists()) {
String xml = rewritten.get(publicXml);
if (xml == null) {
xml = Files.toString(publicXml, UTF_8);
}
Document document = XmlUtils.parseDocument(xml, true);
Element root = document.getDocumentElement();
if (root != null && TAG_RESOURCES.equals(root.getTagName())) {
NodeList children = root.getChildNodes();
for (int i = children.getLength() - 1; i >= 0; i--) {
Node child = children.item(i);
if (child.getNodeType() == Node.ELEMENT_NODE) {
Element resourceElement = (Element) child;
ResourceType type = ResourceType.getEnum(resourceElement.getAttribute(ATTR_TYPE));
String name = resourceElement.getAttribute(ATTR_NAME);
if (type != null && name != null) {
Resource resource = getResource(type, name);
if (resource != null && deleted.contains(resource)) {
root.removeChild(child);
}
}
}
}
}
String formatted = XmlPrettyPrinter.prettyPrint(document, xml.endsWith("\n"));
rewritten.put(publicXml, formatted);
}
}
/**
* Copies one resource directory tree into another; skipping some files, replacing the contents of
* some, and passing everything else through unmodified
*/
private static void filteredCopy(File source, Path destination, Set<File> skip,
Map<File, String> replace) throws IOException {
File destinationFile = destination.toFile();
if (source.isDirectory()) {
File[] children = source.listFiles();
if (children != null) {
if (!destinationFile.exists()) {
boolean success = destinationFile.mkdirs();
if (!success) {
throw new IOException("Could not create " + destination);
}
}
for (File child : children) {
filteredCopy(child, destination.resolve(child.getName()), skip, replace);
}
}
} else if (!skip.contains(source) && source.isFile()) {
String contents = replace.get(source);
if (contents != null) {
Files.write(contents, destinationFile, UTF_8);
} else {
Files.copy(source, destinationFile);
}
}
}
private void stripUnused(Element element, List<String> removed) {
ResourceType type = getResourceType(element);
if (type == ResourceType.ATTR) {
// Not yet properly handled
return;
}
Resource resource = getResource(element);
if (resource != null) {
if (resource.type == ResourceType.DECLARE_STYLEABLE
|| resource.type == ResourceType.ATTR) {
// Don't strip children of declare-styleable; we're not correctly
// tracking field references of the R_styleable_attr fields yet
return;
}
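// For unreachable styles, plurals and arrays, drop all child items but keep the (now empty) element.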
if (!resource.reachable
&& (resource.type == ResourceType.STYLE
|| resource.type == ResourceType.PLURALS
|| resource.type == ResourceType.ARRAY)) {
NodeList children = element.getChildNodes();
for (int i = children.getLength() - 1; i >= 0; i--) {
Node child = children.item(i);
element.removeChild(child);
}
return;
}
}
NodeList children = element.getChildNodes();
for (int i = children.getLength() - 1; i >= 0; i--) {
Node child = children.item(i);
if (child.getNodeType() == Node.ELEMENT_NODE) {
stripUnused((Element) child, removed);
}
}
if (resource != null && !resource.reachable && resource.isRelevantType()) {
removed.add(resource.getUrl());
Node parent = element.getParentNode();
parent.removeChild(element);
}
}
private static String getFieldName(Element element) {
return getFieldName(element.getAttribute(ATTR_NAME));
}
@Nullable
private Resource getResource(Element element) {
ResourceType type = getResourceType(element);
if (type != null) {
String name = getFieldName(element);
return getResource(type, name);
}
return null;
}
private static ResourceType getResourceType(Element element) {
String tagName = element.getTagName();
switch (tagName) {
case TAG_ITEM:
String typeName = element.getAttribute(ATTR_TYPE);
if (!typeName.isEmpty()) {
return ResourceType.getEnum(typeName);
}
break;
case "string-array":
case "integer-array":
return ResourceType.ARRAY;
default:
return ResourceType.getEnum(tagName);
}
return null;
}
private void findUnused() {
List<Resource> roots = Lists.newArrayList();
for (Resource resource : resources) {
if (resource.reachable && resource.type != ResourceType.ID
&& resource.type != ResourceType.ATTR) {
roots.add(resource);
}
}
logger.fine(String.format("The root reachable resources are:\n %s",
Joiner.on(",\n ").join(roots)));
Map<Resource, Boolean> seen = new IdentityHashMap<>(resources.size());
for (Resource root : roots) {
visit(root, seen);
}
List<Resource> unused = Lists.newArrayListWithExpectedSize(resources.size());
for (Resource resource : resources) {
if (!resource.reachable && resource.isRelevantType()) {
unused.add(resource);
}
}
this.unused = unused;
}
private static void visit(Resource root, Map<Resource, Boolean> seen) {
if (seen.containsKey(root)) {
return;
}
seen.put(root, Boolean.TRUE);
root.reachable = true;
if (root.references != null) {
for (Resource referenced : root.references) {
visit(referenced, seen);
}
}
}
private void dumpReferences() {
for (Resource resource : resources) {
if (resource.references != null) {
logger.fine(resource + " => " + resource.references);
}
}
}
private void keepPossiblyReferencedResources() {
if (!mFoundGetIdentifier || mStrings == null) {
// No calls to android.content.res.Resources#getIdentifier; no need
// to worry about string references to resources
return;
}
List<String> strings = new ArrayList<String>(mStrings);
Collections.sort(strings);
logger.fine(String.format("android.content.res.Resources#getIdentifier present: %s",
mFoundGetIdentifier));
logger.fine("Referenced Strings:");
for (String s : strings) {
s = s.trim().replace("\n", "\\n");
if (s.length() > 40) {
s = s.substring(0, 37) + "...";
} else if (s.isEmpty()) {
continue;
}
logger.fine(" " + s);
}
Set<String> names = Sets.newHashSetWithExpectedSize(50);
for (Map<String, Resource> map : typeToName.values()) {
names.addAll(map.keySet());
}
for (String string : mStrings) {
// Check whether the string looks relevant
// We consider three types of strings:
// (1) simple resource names, e.g. "foo" from @layout/foo
// These might be the parameter to a getIdentifier() call, or could
// be composed into a fully qualified resource name for the getIdentifier()
// method. We match these for *all* resource types.
// (2) Relative source names, e.g. layout/foo, from @layout/foo
// These might be composed into a fully qualified resource name for
// getIdentifier().
// (3) Fully qualified resource names of the form package:type/name.
int n = string.length();
boolean justName = true;
boolean haveSlash = false;
for (int i = 0; i < n; i++) {
char c = string.charAt(i);
if (c == '/') {
haveSlash = true;
justName = false;
} else if (c == '.' || c == ':') {
justName = false;
} else if (!Character.isJavaIdentifierPart(c)) {
// This shouldn't happen; we've filtered out these strings in
// the {@link #referencedString} method
assert false : string;
break;
}
}
String name;
if (justName) {
// Check name (below)
name = string;
} else if (!haveSlash) {
// If we have more than just a symbol name, we expect to also see a slash
//noinspection UnnecessaryContinue
continue;
} else {
// Try to pick out the resource name pieces; if we can find the
// resource type unambiguously; if not, just match on names
int slash = string.indexOf('/');
assert slash != -1; // checked with haveSlash above
name = string.substring(slash + 1);
if (name.isEmpty() || !names.contains(name)) {
continue;
}
// See if we have a known specific resource type
if (slash > 0) {
int colon = string.indexOf(':');
String typeName = string.substring(colon != -1 ? colon + 1 : 0, slash);
ResourceType type = ResourceType.getEnum(typeName);
if (type == null) {
continue;
}
Resource resource = getResource(type, name);
if (resource != null) {
logger.fine("Marking " + resource + " used because it "
+ "matches string pool constant " + string);
}
markReachable(resource);
continue;
}
// fall through and check the name
}
if (names.contains(name)) {
for (Map<String, Resource> map : typeToName.values()) {
Resource resource = map.get(name);
if (resource != null) {
logger.fine("Marking " + resource + " used because it "
+ "matches string pool constant " + string);
}
markReachable(resource);
}
} else if (Character.isDigit(name.charAt(0))) {
// Just a number? There are cases where it calls getIdentifier by
// a String number; see for example SuggestionsAdapter in the support
// library which reports supporting a string like "2130837524" and
// "android.resource://com.android.alarmclock/2130837524".
try {
int id = Integer.parseInt(name);
if (id != 0) {
markReachable(valueToResource.get(id));
}
} catch (NumberFormatException e) {
// pass
}
}
}
}
private void recordResources(Path resDir)
throws IOException, SAXException, ParserConfigurationException {
File[] resourceFolders = resDir.toFile().listFiles();
if (resourceFolders != null) {
for (File folder : resourceFolders) {
ResourceFolderType folderType = ResourceFolderType.getFolderType(folder.getName());
if (folderType != null) {
recordResources(folderType, folder);
}
}
}
}
private void recordResources(@NonNull ResourceFolderType folderType, File folder)
throws ParserConfigurationException, SAXException, IOException {
File[] files = folder.listFiles();
FolderConfiguration config = FolderConfiguration.getConfigForFolder(folder.getName());
boolean isDefaultFolder = false;
if (config != null) {
isDefaultFolder = true;
for (int i = 0, n = FolderConfiguration.getQualifierCount(); i < n; i++) {
ResourceQualifier qualifier = config.getQualifier(i);
// Densities are special: even if they're present in just (say) drawable-hdpi
// we'll match it on any other density
if (qualifier != null && !(qualifier instanceof DensityQualifier)) {
isDefaultFolder = false;
break;
}
}
}
if (files != null) {
for (File file : files) {
String path = file.getPath();
boolean isXml = endsWithIgnoreCase(path, DOT_XML);
Resource from = null;
// Record resource for the whole file
if (folderType != ResourceFolderType.VALUES
&& (isXml
|| endsWith(path, DOT_PNG) //also true for endsWith(name, DOT_9PNG)
|| endsWith(path, DOT_JPG)
|| endsWith(path, DOT_GIF)
|| endsWith(path, DOT_JPEG))) {
List<ResourceType> types = FolderTypeRelationship.getRelatedResourceTypes(
folderType);
ResourceType type = types.get(0);
assert type != ResourceType.ID : folderType;
String name = file.getName();
name = name.substring(0, name.indexOf('.'));
Resource resource = getResource(type, name);
if (resource != null) {
resource.addLocation(file);
if (isDefaultFolder) {
resource.hasDefault = true;
}
from = resource;
}
}
if (isXml) {
// For value files, and drawables and colors etc also pull in resource
// references inside the file
recordResourcesUsages(file, isDefaultFolder, from);
}
}
}
}
private void recordManifestUsages(Path manifest)
throws IOException, ParserConfigurationException, SAXException {
String xml = Files.toString(manifest.toFile(), UTF_8);
Document document = XmlUtils.parseDocument(xml, true);
recordManifestUsages(document.getDocumentElement());
}
private void recordResourcesUsages(@NonNull File file, boolean isDefaultFolder,
@Nullable Resource from)
throws IOException, ParserConfigurationException, SAXException {
String xml = Files.toString(file, UTF_8);
Document document = XmlUtils.parseDocument(xml, true);
recordResourceReferences(file, isDefaultFolder, document.getDocumentElement(), from);
}
@Nullable
private Resource getResource(@NonNull ResourceType type, @NonNull String name) {
Map<String, Resource> nameMap = typeToName.get(type);
if (nameMap != null) {
return nameMap.get(getFieldName(name));
}
return null;
}
@Nullable
private Resource getResource(@NonNull String possibleUrlReference) {
ResourceUrl url = ResourceUrl.parse(possibleUrlReference);
if (url != null && !url.framework) {
return getResource(url.type, url.name);
}
return null;
}
private void recordManifestUsages(Node node) {
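// Attribute values and text nodes that parse as non-framework resource URLs ("@type/name")
// become reachable roots.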
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE) {
Element element = (Element) node;
NamedNodeMap attributes = element.getAttributes();
for (int i = 0, n = attributes.getLength(); i < n; i++) {
Attr attr = (Attr) attributes.item(i);
markReachable(getResource(attr.getValue()));
}
} else if (nodeType == Node.TEXT_NODE) {
// Does this apply to any manifests??
String text = node.getNodeValue().trim();
markReachable(getResource(text));
}
NodeList children = node.getChildNodes();
for (int i = 0, n = children.getLength(); i < n; i++) {
Node child = children.item(i);
recordManifestUsages(child);
}
}
private void recordResourceReferences(@NonNull File file, boolean isDefaultFolder,
@NonNull Node node, @Nullable Resource from) {
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE) {
Element element = (Element) node;
if (from != null) {
NamedNodeMap attributes = element.getAttributes();
for (int i = 0, n = attributes.getLength(); i < n; i++) {
Attr attr = (Attr) attributes.item(i);
Resource resource = getResource(attr.getValue());
if (resource != null) {
from.addReference(resource);
}
}
// Android Wear. We *could* limit ourselves to only doing this in files
// referenced from a manifest meta-data element, e.g.
// <meta-data android:name="com.google.android.wearable.beta.app"
// android:resource="@xml/wearable_app_desc"/>
// but given that that property has "beta" in the name, it seems likely
// to change and therefore hardcoding it for that key risks breakage
// in the future.
if ("rawPathResId".equals(element.getTagName())) {
StringBuilder sb = new StringBuilder();
NodeList children = node.getChildNodes();
for (int i = 0, n = children.getLength(); i < n; i++) {
Node child = children.item(i);
if (child.getNodeType() == Element.TEXT_NODE
|| child.getNodeType() == Element.CDATA_SECTION_NODE) {
sb.append(child.getNodeValue());
}
}
if (sb.length() > 0) {
Resource resource = getResource(ResourceType.RAW, sb.toString().trim());
from.addReference(resource);
}
}
}
Resource definition = getResource(element);
if (definition != null) {
from = definition;
definition.addLocation(file);
if (isDefaultFolder) {
definition.hasDefault = true;
}
}
String tagName = element.getTagName();
if (TAG_STYLE.equals(tagName)) {
if (element.hasAttribute(ATTR_PARENT)) {
String parent = element.getAttribute(ATTR_PARENT);
if (!parent.isEmpty() && !parent.startsWith(ANDROID_STYLE_RESOURCE_PREFIX)
&& !parent.startsWith(PREFIX_ANDROID)) {
String parentStyle = parent;
if (!parentStyle.startsWith(STYLE_RESOURCE_PREFIX)) {
parentStyle = STYLE_RESOURCE_PREFIX + parentStyle;
}
Resource ps = getResource(getFieldName(parentStyle));
if (ps != null && definition != null) {
definition.addReference(ps);
}
}
} else {
// Implicit parent styles by name
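// getFieldName turned '.' into '_', so strip trailing '_' segments to walk the implicit
// parent chain, e.g. Theme_Dark_Dialog -> Theme_Dark -> Theme.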
String name = getFieldName(element);
while (true) {
int index = name.lastIndexOf('_');
if (index != -1) {
name = name.substring(0, index);
Resource ps = getResource(STYLE_RESOURCE_PREFIX + getFieldName(name));
if (ps != null && definition != null) {
definition.addReference(ps);
}
} else {
break;
}
}
}
}
if (TAG_ITEM.equals(tagName)) {
// In style? If so the name: attribute can be a reference
if (element.getParentNode() != null
&& element.getParentNode().getNodeName().equals(TAG_STYLE)) {
String name = element.getAttributeNS(ANDROID_URI, ATTR_NAME);
if (!name.isEmpty() && !name.startsWith("android:")) {
Resource resource = getResource(ResourceType.ATTR, name);
if (definition == null) {
Element style = (Element) element.getParentNode();
definition = getResource(style);
if (definition != null) {
from = definition;
definition.addReference(resource);
}
}
}
}
}
} else if (nodeType == Node.TEXT_NODE || nodeType == Node.CDATA_SECTION_NODE) {
String text = node.getNodeValue().trim();
Resource textResource = getResource(getFieldName(text));
if (textResource != null && from != null) {
from.addReference(textResource);
}
}
NodeList children = node.getChildNodes();
for (int i = 0, n = children.getLength(); i < n; i++) {
Node child = children.item(i);
recordResourceReferences(file, isDefaultFolder, child, from);
}
}
public static String getFieldName(@NonNull String styleName) {
return styleName.replace('.', '_').replace('-', '_').replace(':', '_');
}
private static void markReachable(@Nullable Resource resource) {
if (resource != null) {
resource.reachable = true;
}
}
private Set<String> mStrings;
private boolean mFoundGetIdentifier;
private void referencedString(@NonNull String string) {
// See if the string is at all eligible; ignore strings that aren't
// identifiers (made up of Java identifier chars plus '.', ':' and '/'), or that are empty or too long
if (string.isEmpty() || string.length() > 80) {
return;
}
boolean haveIdentifierChar = false;
for (int i = 0, n = string.length(); i < n; i++) {
char c = string.charAt(i);
boolean identifierChar = Character.isJavaIdentifierPart(c);
if (!identifierChar && c != '.' && c != ':' && c != '/') {
// .:/ are for the fully qualified resource names
return;
} else if (identifierChar) {
haveIdentifierChar = true;
}
}
if (!haveIdentifierChar) {
return;
}
if (mStrings == null) {
mStrings = Sets.newHashSetWithExpectedSize(300);
}
mStrings.add(string);
}
private void recordUsages(Path jarFile) throws IOException {
if (!jarFile.toFile().exists()) {
return;
}
ZipInputStream zis = null;
try {
FileInputStream fis = new FileInputStream(jarFile.toFile());
try {
zis = new ZipInputStream(fis);
ZipEntry entry = zis.getNextEntry();
while (entry != null) {
String name = entry.getName();
if (name.endsWith(DOT_CLASS)) {
byte[] bytes = ByteStreams.toByteArray(zis);
if (bytes != null) {
ClassReader classReader = new ClassReader(bytes);
classReader.accept(new UsageVisitor(), 0);
}
}
entry = zis.getNextEntry();
}
} finally {
Closeables.close(fis, true);
}
} finally {
Closeables.close(zis, true);
}
}
private void parseResourceTxtFile(Path rTxt, Set<String> resourcePackages) throws IOException {
BufferedReader reader = java.nio.file.Files.newBufferedReader(rTxt, UTF_8);
String line;
while ((line = reader.readLine()) != null) {
String[] tokens = line.split(" ");
ResourceType type = ResourceType.getEnum(tokens[1]);
for (String resourcePackage : resourcePackages) {
resourceClassOwners.put(resourcePackage.replace('.', '/') + "/R$" + type.getName(), type);
}
if (type == ResourceType.STYLEABLE) {
if (tokens[0].equals("int[]")) {
addResource(ResourceType.DECLARE_STYLEABLE, tokens[2], null);
} else {
// TODO(jongerrish): Implement stripping of styleables.
}
} else {
addResource(type, tokens[2], tokens[3]);
}
}
}
private void addResource(@NonNull ResourceType type, @NonNull String name,
@Nullable String value) {
int realValue = value != null ? Integer.decode(value) : -1;
Resource resource = getResource(type, name);
if (resource != null) {
//noinspection VariableNotUsedInsideIf
if (value != null) {
if (resource.value == -1) {
resource.value = realValue;
} else {
assert realValue == resource.value;
}
}
return;
}
resource = new Resource(type, name, realValue);
resources.add(resource);
if (realValue != -1) {
valueToResource.put(realValue, resource);
}
Map<String, Resource> nameMap = typeToName.get(type);
if (nameMap == null) {
nameMap = Maps.newHashMapWithExpectedSize(30);
typeToName.put(type, nameMap);
}
nameMap.put(name, resource);
// TODO: Assert that we don't set the same resource multiple times to different values.
// Could happen if you pass in stale data!
}
@VisibleForTesting
List<Resource> getAllResources() {
return resources;
}
/**
* Metadata about an Android resource
*/
public static class Resource {
/**
* Type of resource
*/
public ResourceType type;
/**
* Name of resource
*/
public String name;
/**
* Integer id location
*/
public int value;
/**
* Whether this resource can be reached from one of the roots (manifest, code)
*/
public boolean reachable;
/**
* Whether this resource has a default definition (e.g. present in a resource folder with no
* qualifiers). For id references, an inline definition (@+id) does not count as a default
* definition.
*/
public boolean hasDefault;
/**
* Resources this resource references. For example, a layout can reference another via an
* include; a style reference in a layout references that layout style, and so on.
*/
public List<Resource> references;
public final List<File> declarations = Lists.newArrayList();
private Resource(ResourceType type, String name, int value) {
this.type = type;
this.name = name;
this.value = value;
}
@Override
public String toString() {
return type + ":" + name + ":" + value;
}
@SuppressWarnings("RedundantIfStatement") // Generated by IDE
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Resource resource = (Resource) o;
if (name != null ? !name.equals(resource.name) : resource.name != null) {
return false;
}
if (type != resource.type) {
return false;
}
return true;
}
@Override
public int hashCode() {
int result = type != null ? type.hashCode() : 0;
result = 31 * result + (name != null ? name.hashCode() : 0);
return result;
}
public void addLocation(@NonNull File file) {
declarations.add(file);
}
public void addReference(@Nullable Resource resource) {
if (resource != null) {
if (references == null) {
references = Lists.newArrayList();
} else if (references.contains(resource)) {
return;
}
references.add(resource);
}
}
public String getUrl() {
return '@' + type.getName() + '/' + name;
}
public boolean isRelevantType() {
return type != ResourceType.ID; // && getFolderType() != ResourceFolderType.VALUES;
}
}
private class UsageVisitor extends ClassVisitor {
public UsageVisitor() {
super(Opcodes.ASM5);
}
@Override
public MethodVisitor visitMethod(int access, final String name,
String desc, String signature, String[] exceptions) {
return new MethodVisitor(Opcodes.ASM5) {
@Override
public void visitLdcInsn(Object cst) {
if (cst instanceof Integer) {
Integer value = (Integer) cst;
markReachable(valueToResource.get(value));
} else if (cst instanceof String) {
String string = (String) cst;
referencedString(string);
}
}
@Override
public void visitFieldInsn(int opcode, String owner, String name, String desc) {
if (opcode == Opcodes.GETSTATIC) {
ResourceType type = resourceClassOwners.get(owner);
if (type != null) {
Resource resource = getResource(type, name);
if (resource != null) {
markReachable(resource);
}
}
}
}
@Override
public void visitMethodInsn(int opcode, String owner, String name, String desc) {
super.visitMethodInsn(opcode, owner, name, desc);
if (owner.equals("android/content/res/Resources")
&& name.equals("getIdentifier")
&& desc.equals(
"(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)I")) {
mFoundGetIdentifier = true;
// TODO: Check previous instruction and see if we can find a literal
// String; if so, we can more accurately dispatch the resource here
// rather than having to check the whole string pool!
}
}
};
}
}
}
|
Add SVG as an analyzed file type to ResourceShrinker.java
--
MOS_MIGRATED_REVID=128401550
|
src/tools/android/java/com/google/devtools/build/android/ResourceShrinker.java
|
Add SVG as an analyzed file type to ResourceShrinker.java
|
|
Java
|
apache-2.0
|
e97d5db3f0f30d577aded22520ecc4d4e008b313
| 0
|
eFaps/eFaps-Kernel-Install,eFaps/eFaps-Kernel-Install
|
/*
* Copyright 2003 - 2013 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev:1563 $
* Last Changed: $Date:2007-10-28 15:07:41 +0100 (So, 28 Okt 2007) $
* Last Changed By: $Author:tmo $
*/
package org.efaps.esjp.admin.access;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.efaps.admin.EFapsSystemConfiguration;
import org.efaps.admin.KernelSettings;
import org.efaps.admin.access.AccessCache;
import org.efaps.admin.access.AccessKey;
import org.efaps.admin.access.AccessSet;
import org.efaps.admin.access.AccessType;
import org.efaps.admin.access.AccessTypeEnums;
import org.efaps.admin.datamodel.Classification;
import org.efaps.admin.datamodel.Type;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.admin.user.Role;
import org.efaps.db.Context;
import org.efaps.db.Instance;
import org.efaps.db.transaction.ConnectionResource;
import org.efaps.util.EFapsException;
import org.infinispan.Cache;
/**
* This Class is used to check if a user can Access this Type.<br>
* The method execute is called with the Instance and the Accesstype as
* parameters. For the instance object it is checked if the current context user
* has the access defined in the list of access types.
*
* @author The eFaps Team
* @version $Id:SimpleAccessCheckOnType.java 1563 2007-10-28 14:07:41Z tmo $
*/
@EFapsUUID("628a19f6-463f-415d-865b-ba72e303a507")
@EFapsApplication("eFaps-Kernel")
public abstract class SimpleAccessCheckOnType_Base
extends AbstractAccessCheck
{
/**
* {@inheritDoc}
*/
@Override
protected boolean checkAccess(final Parameter _parameter,
final Instance _instance,
final AccessType _accessType)
throws EFapsException
{
boolean ret = false;
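// Consult the access-key cache first; query the database only on a cache miss and cache the result.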
final Cache<AccessKey, Boolean> cache = AccessCache.getKeyCache();
final AccessKey accessKey = AccessKey.get(_instance, _accessType);
final Boolean access = cache.get(accessKey);
if (access == null) {
ret = checkAccessOnDB(_parameter, _instance, _accessType);
AbstractAccessCheck_Base.LOG.trace("access result :{} from DB for: {}", ret, _instance);
cache.put(accessKey, ret);
} else {
ret = access;
AbstractAccessCheck_Base.LOG.trace("access result :{} from Cache for: {}", ret, _instance);
}
return ret;
}
/**
* Check for the instance object if the current context user has the access
* defined in the list of access types against the eFaps DataBase.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _instance instance to check for access for
* @param _accessType access type to check the access for
* @return true if access is granted, else false
* @throws EFapsException on error
*/
protected boolean checkAccessOnDB(final Parameter _parameter,
final Instance _instance,
final AccessType _accessType)
throws EFapsException
{
final Context context = Context.getThreadContext();
final StringBuilder cmd = new StringBuilder();
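// Build a count query over T_ACCESSSET2USER: rows whose access set grants this access type
// and whose user is the current person or one of their roles (further restricted by
// status/company/group below). Access is granted when the count is positive.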
cmd.append("select count(*) from T_ACCESSSET2USER ");
Type type;
if (_parameter.get(ParameterValues.CLASS) instanceof Classification) {
type = (Classification) _parameter.get(ParameterValues.CLASS);
} else {
type = _instance.getType();
}
final Set<Long> users = new HashSet<Long>();
final Set<Role> localRoles = new HashSet<Role>();
boolean noCompCheck = false;
if (type.isCheckStatus() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
cmd.append(" join T_ACCESSSET2STATUS on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2STATUS.ACCESSSET")
.append(" join ").append(type.getMainTable().getSqlTable())
.append(" on ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getStatusAttribute().getSqlColNames().get(0))
.append(" = T_ACCESSSET2STATUS.ACCESSSTATUS");
} else if (type.isCompanyDependent() && type.getMainTable().getSqlColType() != null
&& !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
// in case it is company dependent but not status dependent
cmd.append(" join T_ACCESSSET2DMTYPE on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2DMTYPE.ACCESSSET ")
.append(" join ").append(type.getMainTable().getSqlTable())
.append(" on ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getMainTable().getSqlColType())
.append(" = T_ACCESSSET2DMTYPE.DMTYPE ");
} else {
noCompCheck = true;
}
cmd.append(" where T_ACCESSSET2USER.ACCESSSET in (0");
for (final AccessSet accessSet : type.getAccessSets()) {
if (accessSet.getAccessTypes().contains(_accessType)) {
cmd.append(",").append(accessSet.getId());
users.addAll(accessSet.getUserIds());
}
}
cmd.append(") ").append("and T_ACCESSSET2USER.USERABSTRACT in (").append(context.getPersonId());
for (final Long roleId : context.getPerson().getRoles()) {
if (users.contains(roleId)) {
cmd.append(",").append(roleId);
final Role role = Role.get(roleId);
if (role.isLocal()) {
localRoles.add(role);
}
}
}
cmd.append(")");
if (type.isCheckStatus() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".ID = ").append(_instance.getId());
}
if (type.isCompanyDependent() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
if (noCompCheck) {
AbstractAccessCheck_Base.LOG.error("Cannot check for Company on type '{}'", type);
} else {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getCompanyAttribute().getSqlColNames().get(0)).append(" in (");
boolean first = true;
for (final Long compId : context.getPerson().getCompanies()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(compId);
}
cmd.append(")");
}
}
if (type.isGroupDependent() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())
&& !localRoles.isEmpty()
&& EFapsSystemConfiguration.get().getAttributeValueAsBoolean(
KernelSettings.ACTIVATE_GROUPS)) {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getGroupAttribute().getSqlColNames().get(0)).append(" in (")
.append(" select GROUPID from T_USERASSOC where ROLEID in (");
boolean first = true;
for (final Role role : localRoles) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(role.getId());
}
cmd.append(") and GROUPID in (");
first = true;
for (final Long group : context.getPerson().getGroups()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(group);
}
if (first) {
cmd.append("0");
AbstractAccessCheck_Base.LOG.error("Missing Group for '{}' on groupdependend Access on type '{}'",
context.getPerson().getName(), type);
}
cmd.append("))");
}
AbstractAccessCheck_Base.LOG.debug("cheking access with: {}", cmd);
return executeStatement(_parameter, context, cmd);
}
/**
* {@inheritDoc}
*/
@Override
protected Map<Instance, Boolean> checkAccess(final Parameter _parameter,
final List<?> _instances,
final AccessType _accessType)
throws EFapsException
{
final Map<Instance, Boolean> ret = new HashMap<Instance, Boolean>();
final Cache<AccessKey, Boolean> cache = AccessCache.getKeyCache();
final List<Instance> checkOnDB = new ArrayList<Instance>();
for (final Object instObj : _instances) {
final AccessKey accessKey = AccessKey.get((Instance) instObj, _accessType);
final Boolean access = cache.get(accessKey);
if (access == null) {
checkOnDB.add((Instance) instObj);
} else {
ret.put((Instance) instObj, access);
}
}
AbstractAccessCheck_Base.LOG.trace("access result from Cache: {}", ret);
if (!checkOnDB.isEmpty()) {
final Map<Instance, Boolean> accessMapTmp = checkAccessOnDB(_parameter, checkOnDB, _accessType);
for (final Entry<Instance, Boolean> entry : accessMapTmp.entrySet()) {
final AccessKey accessKey = AccessKey.get(entry.getKey(), _accessType);
cache.put(accessKey, entry.getValue());
}
AbstractAccessCheck_Base.LOG.trace("access result from DB: {}", accessMapTmp);
ret.putAll(accessMapTmp);
}
return ret;
}
/**
* Method to check the access for a list of instances against the eFaps DataBase.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _instances instances to be checked
* @param _accessType type of access
* @return map from instance to whether access is granted
* @throws EFapsException on error
*/
protected Map<Instance, Boolean> checkAccessOnDB(final Parameter _parameter,
final List<?> _instances,
final AccessType _accessType)
throws EFapsException
{
final Map<Instance, Boolean> accessMap = new HashMap<Instance, Boolean>();
final Context context = Context.getThreadContext();
final Type type = ((Instance) _instances.get(0)).getType();
if (type.isCheckStatus() || type.isCompanyDependent()) {
final Set<Long> users = new HashSet<Long>();
final Set<Role> localRoles = new HashSet<Role>();
final StringBuilder cmd = new StringBuilder();
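// Select the IDs of all rows of this type that satisfy the access criteria; each instance
// is then granted access iff its ID appears in the result set.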
cmd.append("select ").append(type.getMainTable().getSqlTable()).append(".ID ")
.append(" from T_ACCESSSET2USER ");
boolean noCompCheck = false;
if (type.isCheckStatus()) {
cmd.append(" join T_ACCESSSET2STATUS on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2STATUS.ACCESSSET")
.append(" join ").append(type.getMainTable().getSqlTable()).append(" on ")
.append(type.getMainTable().getSqlTable()).append(".")
.append(type.getStatusAttribute().getSqlColNames().get(0))
.append(" = T_ACCESSSET2STATUS.ACCESSSTATUS");
} else if (type.isCompanyDependent() && type.getMainTable().getSqlColType() != null) {
// in case it is company dependent but not status dependent
cmd.append(" join T_ACCESSSET2DMTYPE on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2DMTYPE.ACCESSSET ")
.append(" join ").append(type.getMainTable().getSqlTable())
.append(" on ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getMainTable().getSqlColType())
.append(" = T_ACCESSSET2DMTYPE.DMTYPE ");
} else {
noCompCheck = true;
}
cmd.append(" where T_ACCESSSET2USER.ACCESSSET in (0");
for (final AccessSet accessSet : type.getAccessSets()) {
if (accessSet.getAccessTypes().contains(_accessType)) {
cmd.append(",").append(accessSet.getId());
users.addAll(accessSet.getUserIds());
}
}
cmd.append(") ").append("and T_ACCESSSET2USER.USERABSTRACT in (").append(context.getPersonId());
for (final Long roleId : context.getPerson().getRoles()) {
if (users.contains(roleId)) {
cmd.append(",").append(roleId);
final Role role = Role.get(roleId);
if (role.isLocal()) {
localRoles.add(role);
}
}
}
cmd.append(")");
if (type.isCompanyDependent()) {
if (noCompCheck) {
AbstractAccessCheck_Base.LOG.error("Cannot check for Company on type '{}'", type);
} else {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getCompanyAttribute().getSqlColNames().get(0)).append(" in (");
boolean first = true;
for (final Long compId : context.getPerson().getCompanies()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(compId);
}
cmd.append(")");
}
}
// add the check for groups if: the type is group dependent, a local
// role is defined for the user, and the group mechanism is activated
if (type.isGroupDependent() && !localRoles.isEmpty()
&& EFapsSystemConfiguration.get().getAttributeValueAsBoolean(
KernelSettings.ACTIVATE_GROUPS)) {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getGroupAttribute().getSqlColNames().get(0)).append(" in (")
.append(" select GROUPID from T_USERASSOC where ROLEID in (");
boolean first = true;
for (final Role role : localRoles) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(role.getId());
}
cmd.append(") and GROUPID in (");
first = true;
for (final Long group : context.getPerson().getGroups()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(group);
}
if (first) {
cmd.append("0");
AbstractAccessCheck_Base.LOG.error("Missing Group for '{}' on groupdependend Access on type '{}'",
context.getPerson().getName(), type);
}
cmd.append("))");
}
final Set<Long> idList = new HashSet<Long>();
ConnectionResource con = null;
try {
con = context.getConnectionResource();
Statement stmt = null;
try {
AbstractAccessCheck_Base.LOG.debug("Checking access with: {}", cmd);
stmt = con.getConnection().createStatement();
final ResultSet rs = stmt.executeQuery(cmd.toString());
while (rs.next()) {
idList.add(rs.getLong(1));
}
rs.close();
} finally {
if (stmt != null) {
stmt.close();
}
}
con.commit();
} catch (final SQLException e) {
AbstractAccessCheck_Base.LOG.error("sql statement '" + cmd.toString() + "' not executable!", e);
} finally {
if ((con != null) && con.isOpened()) {
con.abort();
}
for (final Object inst : _instances) {
accessMap.put((Instance) inst, idList.contains(((Instance) inst).getId()));
}
}
} else {
final boolean access = checkAccess(_parameter, (Instance) _instances.get(0), _accessType);
for (final Object inst : _instances) {
accessMap.put((Instance) inst, access);
}
}
return accessMap;
}
/**
* Method that queries against the database.
* @param _parameter Parameter as passed by the eFaps API
* @param _context Context
* @param _cmd cmd
* @return true if access granted else false
* @throws EFapsException on error
*/
protected boolean executeStatement(final Parameter _parameter,
final Context _context,
final StringBuilder _cmd)
throws EFapsException
{
boolean hasAccess = false;
ConnectionResource con = null;
try {
con = _context.getConnectionResource();
AbstractAccessCheck_Base.LOG.debug("Checking access with: {}", _cmd);
Statement stmt = null;
try {
stmt = con.getConnection().createStatement();
final ResultSet rs = stmt.executeQuery(_cmd.toString());
if (rs.next()) {
hasAccess = rs.getLong(1) > 0;
}
rs.close();
} finally {
if (stmt != null) {
stmt.close();
}
}
con.commit();
} catch (final SQLException e) {
AbstractAccessCheck_Base.LOG.error("sql statement '" + _cmd.toString() + "' not executable!", e);
} finally {
if ((con != null) && con.isOpened()) {
con.abort();
}
}
return hasAccess;
}
}
|
src/main/efaps/ESJP/org/efaps/esjp/admin/access/SimpleAccessCheckOnType_Base.java
|
/*
* Copyright 2003 - 2013 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Revision: $Rev:1563 $
* Last Changed: $Date:2007-10-28 15:07:41 +0100 (So, 28 Okt 2007) $
* Last Changed By: $Author:tmo $
*/
package org.efaps.esjp.admin.access;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.efaps.admin.EFapsSystemConfiguration;
import org.efaps.admin.KernelSettings;
import org.efaps.admin.access.AccessCache;
import org.efaps.admin.access.AccessKey;
import org.efaps.admin.access.AccessSet;
import org.efaps.admin.access.AccessType;
import org.efaps.admin.access.AccessTypeEnums;
import org.efaps.admin.datamodel.Classification;
import org.efaps.admin.datamodel.Type;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.event.Parameter.ParameterValues;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.admin.user.Role;
import org.efaps.db.Context;
import org.efaps.db.Instance;
import org.efaps.db.transaction.ConnectionResource;
import org.efaps.util.EFapsException;
import org.infinispan.Cache;
/**
* This Class is used to check if a user can Access this Type.<br>
* The method execute is called with the Instance and the Accesstype as
* parameters. For the instance object it is checked if the current context user
* has the access defined in the list of access types.
*
* @author The eFaps Team
* @version $Id:SimpleAccessCheckOnType.java 1563 2007-10-28 14:07:41Z tmo $
*/
@EFapsUUID("628a19f6-463f-415d-865b-ba72e303a507")
@EFapsApplication("eFaps-Kernel")
public abstract class SimpleAccessCheckOnType_Base
extends AbstractAccessCheck
{
/**
* {@inheritDoc}
*/
@Override
protected boolean checkAccess(final Parameter _parameter,
final Instance _instance,
final AccessType _accessType)
throws EFapsException
{
boolean ret = false;
final Cache<AccessKey, Boolean> cache = AccessCache.getKeyCache();
final AccessKey accessKey = AccessKey.get(_instance, _accessType);
final Boolean access = cache.get(accessKey);
if (access == null) {
ret = checkAccessOnDB(_parameter, _instance, _accessType);
AbstractAccessCheck_Base.LOG.trace("access result :{} from DB for: {}", ret, _instance);
cache.put(accessKey, ret);
} else {
ret = access;
AbstractAccessCheck_Base.LOG.trace("access result :{} from Cache for: {}", ret, _instance);
}
return ret;
}
/**
* Check for the instance object if the current context user has the access
* defined in the list of access types against the eFaps DataBase.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _instance instance to check for access for
* @param _accessType access type to check the access for
* @return true if access is granted, else false
* @throws EFapsException on error
*/
protected boolean checkAccessOnDB(final Parameter _parameter,
final Instance _instance,
final AccessType _accessType)
throws EFapsException
{
final Context context = Context.getThreadContext();
final StringBuilder cmd = new StringBuilder();
cmd.append("select count(*) from T_ACCESSSET2USER ");
Type type;
if (_parameter.get(ParameterValues.CLASS) instanceof Classification) {
type = (Classification) _parameter.get(ParameterValues.CLASS);
} else {
type = _instance.getType();
}
final Set<Long> users = new HashSet<Long>();
final Set<Role> localRoles = new HashSet<Role>();
boolean noCompCheck = false;
if (type.isCheckStatus() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
cmd.append(" join T_ACCESSSET2STATUS on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2STATUS.ACCESSSET")
.append(" join ").append(type.getMainTable().getSqlTable())
.append(" on ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getStatusAttribute().getSqlColNames().get(0))
.append(" = T_ACCESSSET2STATUS.ACCESSSTATUS");
} else if (type.isCompanyDependent() && type.getMainTable().getSqlColType() != null
&& !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
            // in case it is company dependent but not status dependent
cmd.append(" join T_ACCESSSET2DMTYPE on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2DMTYPE.ACCESSSET ")
.append(" join ").append(type.getMainTable().getSqlTable())
.append(" on ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getMainTable().getSqlColType())
.append(" = T_ACCESSSET2DMTYPE.DMTYPE ");
} else {
noCompCheck = true;
}
cmd.append(" where T_ACCESSSET2USER.ACCESSSET in (0");
for (final AccessSet accessSet : type.getAccessSets()) {
if (accessSet.getAccessTypes().contains(_accessType)) {
cmd.append(",").append(accessSet.getId());
users.addAll(accessSet.getUserIds());
}
}
cmd.append(") ").append("and T_ACCESSSET2USER.USERABSTRACT in (").append(context.getPersonId());
for (final Long roleId : context.getPerson().getRoles()) {
if (users.contains(roleId)) {
cmd.append(",").append(roleId);
final Role role = Role.get(roleId);
if (role.isLocal()) {
localRoles.add(role);
}
}
}
cmd.append(")");
if (type.isCheckStatus() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".ID = ").append(_instance.getId());
}
if (type.isCompanyDependent() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())) {
if (noCompCheck) {
AbstractAccessCheck_Base.LOG.error("Cannot check for Company on type '{}'", type);
} else {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getCompanyAttribute().getSqlColNames().get(0)).append(" in (");
boolean first = true;
for (final Long compId : context.getPerson().getCompanies()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(compId);
}
cmd.append(")");
}
}
if (type.isGroupDependent() && !_accessType.equals(AccessTypeEnums.CREATE.getAccessType())
&& !localRoles.isEmpty()
&& EFapsSystemConfiguration.get().getAttributeValueAsBoolean(
KernelSettings.ACTIVATE_GROUPS)) {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getGroupAttribute().getSqlColNames().get(0)).append(" in (")
.append(" select GROUPID from T_USERASSOC where ROLEID in (");
boolean first = true;
for (final Role role : localRoles) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(role.getId());
}
cmd.append(") and GROUPID in (");
first = true;
for (final Long group : context.getPerson().getGroups()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(group);
}
if (first) {
cmd.append("0");
AbstractAccessCheck_Base.LOG.error("Missing Group for '{}' on groupdependend Access on type '{}'",
context.getPerson().getName(), type);
}
cmd.append("))");
}
AbstractAccessCheck_Base.LOG.debug("cheking access with: {}", cmd);
return executeStatement(_parameter, context, cmd);
}
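    // Editor's note: the "first" flag loops in checkAccessOnDB above only
    // render a comma separated id list for the SQL IN clauses. A hypothetical
    // helper (not part of the original class) doing the same bookkeeping:
    private static StringBuilder appendIdList(final StringBuilder _cmd,
                                              final Iterable<Long> _ids)
    {
        String separator = "";
        for (final Long id : _ids) {
            _cmd.append(separator).append(id);
            separator = ",";
        }
        return _cmd;
    }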
/**
* {@inheritDoc}
*/
@Override
protected Map<Instance, Boolean> checkAccess(final Parameter _parameter,
final List<?> _instances,
final AccessType _accessType)
throws EFapsException
{
final Map<Instance, Boolean> ret = new HashMap<Instance, Boolean>();
final Cache<AccessKey, Boolean> cache = AccessCache.getKeyCache();
final List<Instance> checkOnDB = new ArrayList<Instance>();
for (final Object instObj : _instances) {
final AccessKey accessKey = AccessKey.get((Instance) instObj, _accessType);
final Boolean access = cache.get(accessKey);
if (access == null) {
checkOnDB.add((Instance) instObj);
} else {
ret.put((Instance) instObj, access);
}
}
AbstractAccessCheck_Base.LOG.trace("access result from Cache: {}", ret);
if (!checkOnDB.isEmpty()) {
final Map<Instance, Boolean> accessMapTmp = checkAccessOnDB(_parameter, checkOnDB, _accessType);
for (final Entry<Instance, Boolean> entry : accessMapTmp.entrySet()) {
final AccessKey accessKey = AccessKey.get(entry.getKey(), _accessType);
cache.put(accessKey, entry.getValue());
}
AbstractAccessCheck_Base.LOG.trace("access result from DB: {}", accessMapTmp);
ret.putAll(accessMapTmp);
}
return ret;
}
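    // Editor's note: the bulk variant above first splits the instances into
    // "answered from cache" and "check on DB" before merging both results.
    // A hypothetical sketch of that partition step (not part of the original
    // class), using only JDK collections:
    private static <K> Map<Boolean, List<K>> partitionByCache(final List<K> _keys,
                                                              final Map<K, Boolean> _cache)
    {
        // true -> keys with a cached decision, false -> keys that still need a DB check
        return _keys.stream().collect(
                        java.util.stream.Collectors.partitioningBy(_cache::containsKey));
    }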
/**
     * Method to check the access for a list of instances against the eFaps database.
*
* @param _parameter Parameter as passed by the eFaps API
* @param _instances instances to be checked
* @param _accessType type of access
     * @return map from instance to access flag
* @throws EFapsException on error
*/
protected Map<Instance, Boolean> checkAccessOnDB(final Parameter _parameter,
final List<?> _instances,
final AccessType _accessType)
throws EFapsException
{
final Map<Instance, Boolean> accessMap = new HashMap<Instance, Boolean>();
final Context context = Context.getThreadContext();
final Type type = ((Instance) _instances.get(0)).getType();
if (type.isCheckStatus() || type.isCompanyDependent()) {
final Set<Long> users = new HashSet<Long>();
final Set<Role> localRoles = new HashSet<Role>();
final StringBuilder cmd = new StringBuilder();
cmd.append("select ").append(type.getMainTable().getSqlTable()).append(".ID ")
.append(" from T_ACCESSSET2USER ")
.append(" join T_ACCESSSET2STATUS on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2STATUS.ACCESSSET");
boolean noCompCheck = false;
if (type.isCheckStatus()) {
cmd.append(" join ").append(type.getMainTable().getSqlTable()).append(" on ")
.append(type.getMainTable().getSqlTable()).append(".")
.append(type.getStatusAttribute().getSqlColNames().get(0))
.append(" = T_ACCESSSET2STATUS.ACCESSSTATUS");
} else if (type.isCompanyDependent() && type.getMainTable().getSqlColType() != null) {
                // in case it is company dependent but not status dependent
cmd.append(" join T_ACCESSSET2DMTYPE on T_ACCESSSET2USER.ACCESSSET = T_ACCESSSET2DMTYPE.ACCESSSET ")
.append(" join ").append(type.getMainTable().getSqlTable())
.append(" on ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getMainTable().getSqlColType())
.append(" = T_ACCESSSET2DMTYPE.DMTYPE ");
} else {
noCompCheck = true;
}
cmd.append(" where T_ACCESSSET2USER.ACCESSSET in (0");
for (final AccessSet accessSet : type.getAccessSets()) {
if (accessSet.getAccessTypes().contains(_accessType)) {
cmd.append(",").append(accessSet.getId());
users.addAll(accessSet.getUserIds());
}
}
cmd.append(") ").append("and T_ACCESSSET2USER.USERABSTRACT in (").append(context.getPersonId());
for (final Long roleId : context.getPerson().getRoles()) {
if (users.contains(roleId)) {
cmd.append(",").append(roleId);
final Role role = Role.get(roleId);
if (role.isLocal()) {
localRoles.add(role);
}
}
}
cmd.append(")");
if (type.isCompanyDependent()) {
if (noCompCheck) {
AbstractAccessCheck_Base.LOG.error("Cannot check for Company on type '{}'", type);
} else {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getCompanyAttribute().getSqlColNames().get(0)).append(" in (");
boolean first = true;
for (final Long compId : context.getPerson().getCompanies()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(compId);
}
cmd.append(")");
}
}
            // add the check for groups if: the type is group dependent, a local
// role is defined for the user, the group mechanism is activated
if (type.isGroupDependent() && !localRoles.isEmpty()
&& EFapsSystemConfiguration.get().getAttributeValueAsBoolean(
KernelSettings.ACTIVATE_GROUPS)) {
cmd.append(" and ").append(type.getMainTable().getSqlTable()).append(".")
.append(type.getGroupAttribute().getSqlColNames().get(0)).append(" in (")
.append(" select GROUPID from T_USERASSOC where ROLEID in (");
boolean first = true;
for (final Role role : localRoles) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(role.getId());
}
cmd.append(") and GROUPID in (");
first = true;
for (final Long group : context.getPerson().getGroups()) {
if (first) {
first = false;
} else {
cmd.append(",");
}
cmd.append(group);
}
if (first) {
cmd.append("0");
AbstractAccessCheck_Base.LOG.error("Missing Group for '{}' on groupdependend Access on type '{}'",
context.getPerson().getName(), type);
}
cmd.append("))");
}
final Set<Long> idList = new HashSet<Long>();
ConnectionResource con = null;
try {
con = context.getConnectionResource();
Statement stmt = null;
try {
AbstractAccessCheck_Base.LOG.debug("Checking access with: {}", cmd);
stmt = con.getConnection().createStatement();
final ResultSet rs = stmt.executeQuery(cmd.toString());
while (rs.next()) {
idList.add(rs.getLong(1));
}
rs.close();
} finally {
if (stmt != null) {
stmt.close();
}
}
con.commit();
} catch (final SQLException e) {
AbstractAccessCheck_Base.LOG.error("sql statement '" + cmd.toString() + "' not executable!", e);
} finally {
if ((con != null) && con.isOpened()) {
con.abort();
}
for (final Object inst : _instances) {
accessMap.put((Instance) inst, idList.contains(((Instance) inst).getId()));
}
}
} else {
final boolean access = checkAccess(_parameter, (Instance) _instances.get(0), _accessType);
for (final Object inst : _instances) {
accessMap.put((Instance) inst, access);
}
}
return accessMap;
}
/**
* Method that queries against the database.
     * @param _parameter Parameter as passed by the eFaps API
     * @param _context Context
     * @param _cmd SQL statement to be executed
     * @return true if access is granted, else false
* @throws EFapsException on error
*/
protected boolean executeStatement(final Parameter _parameter,
final Context _context,
final StringBuilder _cmd)
throws EFapsException
{
boolean hasAccess = false;
ConnectionResource con = null;
try {
con = _context.getConnectionResource();
AbstractAccessCheck_Base.LOG.debug("Checking access with: {}", _cmd);
Statement stmt = null;
try {
stmt = con.getConnection().createStatement();
final ResultSet rs = stmt.executeQuery(_cmd.toString());
if (rs.next()) {
                hasAccess = rs.getLong(1) > 0;
}
rs.close();
} finally {
if (stmt != null) {
stmt.close();
}
}
con.commit();
} catch (final SQLException e) {
AbstractAccessCheck_Base.LOG.error("sql statement '" + _cmd.toString() + "' not executable!", e);
} finally {
if ((con != null) && con.isOpened()) {
con.abort();
}
}
return hasAccess;
}
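    // Editor's note: the JDBC handling above predates try-with-resources. A
    // hypothetical equivalent (not part of the original class) that lets the
    // JVM close the Statement and ResultSet automatically:
    private static boolean hasPositiveCount(final java.sql.Connection _con,
                                            final String _sql)
        throws SQLException
    {
        try (Statement stmt = _con.createStatement();
             ResultSet rs = stmt.executeQuery(_sql)) {
            // the count(*) query returns a single row; access is granted if it is > 0
            return rs.next() && rs.getLong(1) > 0;
        }
    }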
}
|
- Issue #38: The check for simple access does return wrong value if not status dependend
closes #38
|
src/main/efaps/ESJP/org/efaps/esjp/admin/access/SimpleAccessCheckOnType_Base.java
|
- Issue #38: The check for simple access does return wrong value if not status dependend
|
|
Java
|
apache-2.0
|
22d87aa2d51a037ac45964ab0659f0fa6d97ed14
| 0
|
thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck,thomasmaurel/ensj-healthcheck,Ensembl/ensj-healthcheck
|
/*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
public enum Species {
// defined new Species and properties: taxonomy_id, assemblyprefix, stableIDprefix, alias
AEDES_AEGYPTI(7159, "", "IGNORE", "aedes,aedesaegypti,aedes_aegypti"),
AILUROPODA_MELANOLEUCA(9646, "ailMel", "ENSAME", "panda,giant panda,ailuropoda melanoleuca,ailuropoda_melanoleuca"),
ANAS_PLATYRHYNCHOS(8839, "BGI_duck", "ENSAPL","anapla,apla,mallard,anas_platyrhynchos,aplatyrhynchos,duck,anas platyrhynchos"),
ANOLIS_CAROLINENSIS(28377, "AnoCar", "ENSACA", "lizard,anole,anolis_lizard,anolis,anolis_carolinensis"),
ANOPHELES_GAMBIAE(7165, "AgamP", "IGNORE", "mosquito,anopheles,agambiae,anophelesgambiae,anopheles_gambiae"),
APIS_MELLIFERA(7460, "AMEL", "IGNORE", "honeybee,honey_bee,apis,amellifera,apismellifera,apis_mellifera"),
ASTYANAX_MEXICANUS(7994, "AstMex", "ENSAMX", "amex,amexicanus,astmex,astyanax mexicanus,astyanax_mexicanus,cave fish"),
BOS_TAURUS(9913, "UMD", "ENSBTA", "cow,btaurus,bostaurus,bos_taurus"),
CAENORHABDITIS_BRIGGSAE(6238, "CBR", "IGNORE", "briggsae,cbriggsae,caenorhabditisbriggsae,caenorhabditis_briggsae"),
CAENORHABDITIS_ELEGANS(6239, "WBcel", "IGNORE", "elegans,celegans,caenorhabditiselegans,caenorhabditis_elegans"),
CALLITHRIX_JACCHUS(9483, "C_jacchus", "ENSCJA", "marmoset,white-tufted-ear marmoset,callithrix_jacchus,callithrix jacchus,Callithrix_jacchus,Callithrix jacchus,callithrix"),
CANIS_FAMILIARIS(9615, "CanFam", "ENSCAF", "dog,doggy,cfamiliaris,canisfamiliaris,canis_familiaris"),
CAVIA_PORCELLUS(10141, "CAVPOR", "ENSCPO", "guineapig,guinea_pig,cporcellus,cavia_porcellus"),
CERATOTHERIUM_SIMUM_SIMUM(73337,"CerSimSim","ENSCSI","ceratotherium simum simum,ceratotherium_simum_simum,cersim,csim,csimum_simum,rhinoceros"),
CHOLOEPUS_HOFFMANNI(9358, "choHof", "ENSCHO", "Sloth,Two-toed_sloth,Hoffmans_two-fingered_sloth,choloepus_hoffmanni"),
CHLOROCEBUS_SABAEUS(60711,"ChlSab","ENSCSA","chlorocebus_sabaeus,chlorocebus_aethiops_sabaeus,vervet monkey,african green monkey,green monkey"),
CIONA_INTESTINALIS(7719, "KH", "ENSCIN", "cionaintestinalis,ciona_int,ciona_intestinalis"),
CIONA_SAVIGNYI(51511, "CSAV", "ENSCSAV", "savignyi,cionasavignyi,csavignyi,ciona_savignyi"),
CRICETULUS_GRISEUS(10029, "CriGri", "ENSCGR", "hamster,chinese_hamster,cgriseus,cricetulus_griseus"),
CULEX_PIPIENS(7175, "CpiJ", "CPIJ", "culex,culexpipiens,culex_pipiens"),
DANIO_RERIO(7955, "GRCz", "ENSDAR", "zebrafish,danio,drerio,daniorerio,danio_rerio"),
DASYPUS_NOVEMCINCTUS(9361, "DasNov", "ENSDNO", "armadillo,daisy,dasypus,nine_banded_armadillo,nine-banded_armadillo,texas_armadillo,dasypus_novemcinctus"),
DIPODOMYS_ORDII(10020, "DIPORD", "ENSDOR", "ords_kangaroo_rat,ordskangaroorat,kangaroo_rat, kangaroorat , dipodomys_ordii"),
DROSOPHILA_ANANASSAE(7217, "dana", "IGNORE", "drosophila,ananassae,drosophilaananassae,drosophila_ananassae,dana"),
DROSOPHILA_GRIMSHAWI(7222, "dgri", "IGNORE", "drosophila,grimshawi,drosophilagrimshawi,drosophila_grimshawi,dgri"),
DROSOPHILA_MELANOGASTER(7227, "BDGP", "IGNORE", "drosophila,dmelongaster,drosophilamelanogaster,drosophila_melanogaster"),
DROSOPHILA_PSEUDOOBSCURA(7237, "BCM-HGSC", "IGNORE", "drosophila,pseudoobscura,drosophilapseudoobscura,drosophila_pseudoobscura,dpse"),
DROSOPHILA_WILLISTONI(7260, "dwil", "IGNORE", "drosophila,willistoni,drosophilawillistonii,drosophila_willistoni,dwil"),
DROSOPHILA_YAKUBA(7245, "dyak", "IGNORE", "drosophila,yakuba,drosophilayakuba,drosophila_yakuba,dyak"),
ECHINOPS_TELFAIRI(9371, "TENREC", "ENSETE", "tenrec,echinops,small_madagascar_hedgehog,lesser_hedgehog_tenrec,echinops_telfairi"),
EQUUS_CABALLUS(9796, "EquCab", "ENSECA", "horse,equus,mr_ed,ecaballus,equus_caballus"),
ERINACEUS_EUROPAEUS(9365, "HEDGEHOG", "ENSEEU", "hedgehog,european_hedgehog,eeuropaeus,erinaceus_europaeus"),
FICEDULA_ALBICOLLIS(59894, "FicAlb", "ENSFAL", "flycatcher,falbicollis,collared_flycatcher,f_albicollis,ficalb"),
FELIS_CATUS(9685, "Felis_catus", "ENSFCA", "cat,fcatus,felis,domestic_cat,felis_catus"),
GADUS_MORHUA(8049, "gadMor", "ENSGMO", "cod,gadus_morhua,gmorhua,atlantic_cod"),
GALLUS_GALLUS(9031, "Galgal", "ENSGAL", "chicken,chick,ggallus,gallusgallus,gallus_gallus"),
GASTEROSTEUS_ACULEATUS(69293, "BROADS", "ENSGAC", "stickleback,gas_aculeatus,gasaculeatus,gasterosteusaculeatus,gasterosteus_aculeatus"),
GORILLA_GORILLA(9595, "gorGor", "ENSGGO", "gorilla,gorilla_gorilla,ggor"),
HETEROCEPHALUS_GLABER(10181, "HetGla", "ENSHGL", "naked_mole_rat,heterocephalus_glaber,hglaber"),
HOMO_SAPIENS(9606, "GRCh", "ENS", "human,hsapiens,homosapiens,homo_sapiens"),
ICTIDOMYS_TRIDECEMLINEATUS(43179, "spetri", "ENSSTO", "squirrel,stridecemlineatus,thirteen-lined_ground_squirrel,ictidomys_tridecemlineatus_arenicola,ictidomys_tridecemlineatus"),
LATIMERIA_CHALUMNAE(7897,"LatCha", "ENSLAC","coelacanth,latimeria_chalumnae,latimeria,l_chalumnae,Latimeria chalumnae"),
LEPISOSTEUS_OCULATUS(7918, "LepOcu","ENSLOC","spotted_gar"),
LOXODONTA_AFRICANA(9785, "LoxAfr", "ENSLAF", "elephant,nelly,loxodonta,african_elephant,african_savannah_elephant,african_bush_elephant,loxodonta_africana"),
MACACA_MULATTA(9544, "MM", "ENSMMU", "macacamulatta,rhesusmacaque,rhesus_macaque,macaque,macaca_mulatta"),
MACROPUS_EUGENII(9315, "Meug", "ENSMEU", "wallaby,tammar_wallaby,macropuseugenii,m_eugenii,tammarwallaby,Macropus eugenii,macropus_eugenii"),
MELEAGRIS_GALLOPAVO(9103, "UMD", "ENSMGA", "turkey,common turkey,wild turkey,meleagris_gallopavo, meleagris_gallopavo"),
MELOPSITTACUS_UNDULATUS(13146, "MelUnd", "ENSMUN", "budgerigar,melopsittacus_undulatus,mundulatus"),
MICROCEBUS_MURINUS(30608, "micMur", "ENSMIC", "mouse_lemur,mouselemur,microcebus,microcebus_murinus"),
MONODELPHIS_DOMESTICA(13616, "BROADO", "ENSMOD", "opossum,monodelphis,mdomestica,mdomesticus,monodelphisdomestica,monodelphisdomesticus,monodelphis_domesticus,monodelphis_domestica"),
MUS_MUSCULUS(10090, "GRCm", "ENSMUS", "mouse,mmusculus,musmusculus,mus_musculus"),
MUSTELA_PUTORIUS_FURO(9669, "MusPutFur", "ENSMPU", "ferret,domestic ferret,Mustela_putorius_furo,Mustela putorius furo"),
MYOTIS_LUCIFUGUS(59463, "Myoluc", "ENSMLU", "microbat,little_brown_bat,mlucifugus,myotis,myotis_lucifugus"),
NOMASCUS_LEUCOGENYS(61853, "NLEU", "ENSNLE","gibbon,nleu,nomleu,nleugogenys,nomascus_leucogenys"),
OCHOTONA_PRINCEPS(9978, "PIKA", "ENSOPR", "pika,Americanpika,American_pika,oprinceps,ochotona,ochotona_princeps"),
OREOCHROMIS_NILOTICUS(8128, "Orenil", "ENSONI", "tilapia,Oreochromis niloticus,oreochromis niloticus,Oreochromis_niloticus,oreochromis_niloticus,Nile tilapia,nile tilapia,Nile_tilapia,nile_tilapia,O. niloticus"),
ORNITHORHYNCHUS_ANATINUS(9258, "OANA", "ENSOAN", "platypus,oanatius,ornithorhynchus_anatinus"),
ORYCTEROPUS_AFER_AFER(1230840,"OryAfe","ENSOAF","aardvark,oafe,oafer_after,oryafe,orycteropus afer afer,orycteropus_afer_afer"),
ORYCTOLAGUS_CUNICULUS(9986, "OryCun", "ENSOCU", "rabbit,oryctolagus,domestic_rabbit,bunny,japanese_white_rabbit,european_rabbit,oryctolagus_cuniculus"),
ORYZIAS_LATIPES(8090, "MEDAKA", "ENSORL", "medaka,oryzias,japanese_medaka,japanese_rice_fish,japanese_ricefish,japanese_killifish,oryzias_latipes"),
OTOLEMUR_GARNETTII(30611, "OtoGar", "ENSOGA", "bushbaby,bush_baby,galago,small_eared_galago,ogarnettii,otolemur,otolemur_garnettii"),
OVIS_ARIES(9940, "Oar", "ENSOAR", "ovis_aries,oaries,oviari,sheep"),
PAN_TROGLODYTES(9598, "CHIMP", "ENSPTR", "chimp,chimpanzee,ptroglodytes,pantroglodytes,pan_troglodytes"),
PAPIO_ANUBIS(9555,"PapAnu", "ENSPAN", "papio_anubis"),
PAPIO_HAMADRYAS(9557, "Pham", "ENSPHA", "baboon,Papio_hamadryas,papio_hamadryas,papio_hamadryas,sacred_baboon,western_baboon,red_baboon"),
PELODISCUS_SINENSIS(13735, "PelSin", "ENSPSI", "Chinese_softshell_turtle,turtle,softshell_turtle,Trionyx_sinensis"),
PETROMYZON_MARINUS(7757, "Pmarinus", "ENSPMA", "lamprey,sea_lamprey,pmarinus,petromyzon,petromyzon_marinus"),
POECILIA_FORMOSA(48698, "PoeFor", "ENSPFO", "amazon molly,poecilia_formosa,pformosa,poefor,pfor"),
PHYSETER_MACROCEPHALUS(9755,"PhyMac", "ENSPMC", "physeter_macrocephalus, sperm whale, pmac, pmacrocephalus, physeter macrocephalus, phymac"),
PONGO_ABELII(9601, "PPYG", "ENSPPY", "orangutan,orang-utan,pabellii,pongo_abelii"),
PROCAVIA_CAPENSIS(9813, "PROCAP", "ENSPCA", "cape_rock_hyrax,caperockhyrax,procaviacapensis,procavia_capensis"),
PTEROPUS_VAMPYRUS(132908, "PTEVAM", "ENSPVA", "large_flying_fox,largeflyingfox,pteropusvampyrus,pteropus_vampyrus"),
RATTUS_NORVEGICUS(10116, "Rnor", "ENSRNO", "rat,rnovegicus,rattusnorvegicus,rattus_norvegicus"),
SACCHAROMYCES_CEREVISIAE(4932, "R", "IGNORE", "yeast,saccharomyces,scerevisiae,saccharomycescerevisiae,saccharomyces_cerevisiae"),
SAIMIRI_BOLIVIENSIS(39432, "SaiBol", "ENSSBO", "saimiri_boliviensis,sboliviensis,squirrel_monkey,bolivian_squirrel_monkey,squirrelmonkey"),
SARCOPHILUS_HARRISII(9305, "devil", "ENSSHA", "devil,Sarcophilus_harrisii,sarcophilus_harrisii,tasmanian_devil,taz"),
SOREX_ARANEUS(42254, "COMMON_SHREW", "ENSSAR", "shrew,common_shrew,commonShrew,european_shrew,saraneus,sorex,sorex_araneus"),
SUS_SCROFA(9823, "Sscrofa", "ENSSSC", "pig,boar,wildboar,wild_boar,susscrofa,sus_scrofa"),
TAENIOPYGIA_GUTTATA(59729, "taeGut", "ENSTGU", "zebrafinch,zebra_finch,taeniopygia_guttata,taeniopygiaguttata,tguttata,poephila_guttata,taenopygia_guttata"),
TAKIFUGU_RUBRIPES(31033, "FUGU", "ENSTRU", "pufferfish,fugu,frubripes,fugurubripes,fugu_rubripes,takifugu,trubripes,takifugurubripes,takifugu_rubripes"),
TARSIUS_SYRICHTA(9478, "TARSYR", "ENSTSY", "philippine_tarsier,philippinetarsier,tarsiussyrichta,tarsius_syrichta"),
TETRAODON_NIGROVIRIDIS(99883, "TETRAODON", "IGNORE", "tetraodon,tnigroviridis,tetraodonnigroviridis,tetraodon_nigroviridis"),
TUPAIA_BELANGERI(37347, "TREESHREW", "ENSTBE", "treeshrew,tbelangeri,northern_tree_shrew,common_tree_shrew,tupaia_belangeri"),
TURSIOPS_TRUNCATUS(9739, "TURTRU", "ENSTTR", "bottlenosed_dolphin,dolphin,tursiopstruncatus,tursiops_truncatus"),
VICUGNA_PACOS(30538, "VICPAC", "ENSVPA", "alpaca,vicugnapacos,vicugna_pacos"),
XENOPUS_TROPICALIS(8364, "JGI", "ENSXET", "pipid,pipidfrog,xenopus,xtropicalis,xenopustropicalis,xenopus_tropicalis"),
XIPHOPHORUS_MACULATUS(8083, "Xipmac", "ENSXMA", "xiphophorous_maculatus,platyfish,southern_platyfish"),
// MASTER_SCHEMA(0, "", "", "master_schema,masterschema,schema"),
HEALTHCHECK(0, "", "", ""),
HELP(0, "", "", ""),
NCBI_TAXONOMY(0, "", "", ""),
SYSTEM(0, "", "", ""),
ENSEMBL_WEBSITE(0, "", "", ""),
UNKNOWN(0, "", "", ""),
ANCESTRAL_SEQUENCES(0, "", "", "ancestral,ancestor");
// Taxonomy IDs - see ensembl-compara/sql/taxon.txt
private static Map<Integer, Species> taxonIDToSpecies = new HashMap<Integer, Species>();
private static Map<String, Species> assemblyPrefixToSpecies = new HashMap<String, Species>();
private static Map<Species, String> vegaStableIDPrefix = new EnumMap<Species, String>(Species.class);
private static Logger logger = Logger.getLogger("HealthCheckLogger");
// populate the hash tables
static {
for (Species s : values()) {
taxonIDToSpecies.put(s.getTaxonID(), s);
assemblyPrefixToSpecies.put(s.getAssemblyPrefix(), s);
// we have to add to the Vega hash the 4 species with Vega annotation
switch (s) {
case HOMO_SAPIENS:
vegaStableIDPrefix.put(Species.HOMO_SAPIENS, "OTTHUM");
break;
case MUS_MUSCULUS:
vegaStableIDPrefix.put(Species.MUS_MUSCULUS, "OTTMUS");
break;
case CANIS_FAMILIARIS:
vegaStableIDPrefix.put(Species.CANIS_FAMILIARIS, "OTTCAN");
break;
case DANIO_RERIO:
vegaStableIDPrefix.put(Species.DANIO_RERIO, "OTTDAR");
break;
}
}
}
private final int taxonID;
private final String assemblyPrefix;
private final String stableIDPrefix;
private final String alias;
private final Set<String> aliasSet;
private Species(int tax_id, String assembly, String stableID, String alias) {
this.taxonID = tax_id;
this.assemblyPrefix = assembly;
this.stableIDPrefix = stableID;
this.alias = alias;
        //Build a hash set of lowercased aliases rather than splitting the alias string every time
Set<String> aliasSet = new HashSet<String>();
for(String a: alias.split(",")) {
aliasSet.add(a.toLowerCase().trim());
}
aliasSet.add(this.name().toLowerCase());
this.aliasSet = aliasSet;
}
// getters for the properties
public int getTaxonID() {
return taxonID;
};
public String getAssemblyPrefix() {
return assemblyPrefix;
};
public String getStableIDPrefix() {
return stableIDPrefix;
};
public String getAlias() {
return alias;
};
    // methods to maintain backwards compatibility
// -----------------------------------------------------------------
/**
* Resolve an alias to a Species object.
*
* @param speciesAlias
* The alias (e.g. human, homosapiens, hsapiens)
* @return The species object corresponding to alias, or Species.UNKNOWN if it cannot be resolved.
*/
public static Species resolveAlias(String speciesAlias) {
String alias = speciesAlias.toLowerCase();
// --------------------------------------
for (Species s : values()) {
if(s.aliasSet.contains(alias)) {
return s;
}
}
return Species.UNKNOWN;
}
// -----------------------------------------------------------------
/**
* Get the taxonomy ID associated with a particular species.
*
* @param s
* The species to look up.
* @return The taxonomy ID associated with s, or "" if none is found.
*/
public static String getTaxonomyID(Species s) {
String result = "";
result = Integer.toString(s.getTaxonID());
return result;
}
// -----------------------------------------------------------------
/**
* Get the species associated with a particular taxonomy ID.
*
     * @param taxon
     *            The taxonomy ID to look up.
     * @return The species associated with taxon, or Species.UNKNOWN if none is found.
*/
public static Species getSpeciesFromTaxonomyID(int taxon) {
Species result = UNKNOWN;
if (taxonIDToSpecies.containsKey(taxon)) {
result = (Species) taxonIDToSpecies.get(taxon);
} else {
logger.warning("Cannot get species for taxonomy ID " + taxon + " returning Species.UNKNOWN");
}
return result;
}
// -------------------------------------------------------------------------
/**
* Return a Species object corresponding to a particular assembly prefix.
*
* @param prefix
* The assembly prefix.
*
* @return The Species corresponding to prefix, or Species.UNKNOWN.
*/
public static Species getSpeciesForAssemblyPrefix(String prefix) {
Species result = Species.UNKNOWN;
if (assemblyPrefixToSpecies.containsKey(prefix)) {
result = (Species) assemblyPrefixToSpecies.get(prefix);
} else {
result = Species.UNKNOWN;
}
return result;
}
// -------------------------------------------------------------------------
/**
* Get the assembly prefix for a species.
*
* @param s
* The species.
* @return The assembly prefix for s.
*/
public static String getAssemblyPrefixForSpecies(Species s) {
return (String) s.getAssemblyPrefix();
}
// -------------------------------------------------------------------------
/**
* Get the stable ID prefix for a species.
*
* @param s
* The species.
* @param t
* The type of database.
* @return The stable ID prefix for s. Note "IGNORE" is used for imported species.
*/
public static String getStableIDPrefixForSpecies(Species s, DatabaseType t) {
String result = "";
if (t.equals(DatabaseType.CORE)) {
result = (String) s.getStableIDPrefix();
} else if (t.equals(DatabaseType.VEGA)) {
result = (String) vegaStableIDPrefix.get(s);
}
if (result == null || result.equals("")) {
logger.warning("Can't get stable ID prefix for " + s.toString() + " " + t.toString() + " database");
}
return result;
}
// -------------------------------------------------------------------------
/**
* Get the BioMart table root for a species (e.g. hsapiens, mmusculus)
*/
public String getBioMartRoot() {
String[] bits = this.name().toLowerCase().split("_");
return bits.length > 1 ? bits[0].substring(0, 1) + bits[1] : "";
}
public String toString() {
return this.name().toLowerCase();
}
// -----------------------------------------------------------------
}
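The enum above resolves free-text aliases to a canonical Species constant and exposes per-species metadata (taxonomy ID, assembly prefix, stable ID prefix, BioMart root). A minimal usage sketch follows; it is hypothetical, not part of the file, and assumes only that the class is on the classpath (the DatabaseType-dependent stable-ID lookup is omitted).

// Hypothetical usage sketch for the Species enum above; not part of the file.
import org.ensembl.healthcheck.Species;

public class SpeciesAliasDemo {
    public static void main(String[] args) {
        // "zebrafish" is one of the comma separated aliases declared for DANIO_RERIO
        Species s = Species.resolveAlias("zebrafish");
        System.out.println(s);                     // danio_rerio
        System.out.println(s.getTaxonID());        // 7955
        System.out.println(s.getAssemblyPrefix()); // GRCz after this change
        System.out.println(s.getBioMartRoot());    // drerio
    }
}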
|
src/org/ensembl/healthcheck/Species.java
|
/*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
public enum Species {
// defined new Species and properties: taxonomy_id, assemblyprefix, stableIDprefix, alias
AEDES_AEGYPTI(7159, "", "IGNORE", "aedes,aedesaegypti,aedes_aegypti"),
AILUROPODA_MELANOLEUCA(9646, "ailMel", "ENSAME", "panda,giant panda,ailuropoda melanoleuca,ailuropoda_melanoleuca"),
ANAS_PLATYRHYNCHOS(8839, "BGI_duck", "ENSAPL","anapla,apla,mallard,anas_platyrhynchos,aplatyrhynchos,duck,anas platyrhynchos"),
ANOLIS_CAROLINENSIS(28377, "AnoCar", "ENSACA", "lizard,anole,anolis_lizard,anolis,anolis_carolinensis"),
ANOPHELES_GAMBIAE(7165, "AgamP", "IGNORE", "mosquito,anopheles,agambiae,anophelesgambiae,anopheles_gambiae"),
APIS_MELLIFERA(7460, "AMEL", "IGNORE", "honeybee,honey_bee,apis,amellifera,apismellifera,apis_mellifera"),
ASTYANAX_MEXICANUS(7994, "AstMex", "ENSAMX", "amex,amexicanus,astmex,astyanax mexicanus,astyanax_mexicanus,cave fish"),
BOS_TAURUS(9913, "UMD", "ENSBTA", "cow,btaurus,bostaurus,bos_taurus"),
CAENORHABDITIS_BRIGGSAE(6238, "CBR", "IGNORE", "briggsae,cbriggsae,caenorhabditisbriggsae,caenorhabditis_briggsae"),
CAENORHABDITIS_ELEGANS(6239, "WBcel", "IGNORE", "elegans,celegans,caenorhabditiselegans,caenorhabditis_elegans"),
CALLITHRIX_JACCHUS(9483, "C_jacchus", "ENSCJA", "marmoset,white-tufted-ear marmoset,callithrix_jacchus,callithrix jacchus,Callithrix_jacchus,Callithrix jacchus,callithrix"),
CANIS_FAMILIARIS(9615, "CanFam", "ENSCAF", "dog,doggy,cfamiliaris,canisfamiliaris,canis_familiaris"),
CAVIA_PORCELLUS(10141, "CAVPOR", "ENSCPO", "guineapig,guinea_pig,cporcellus,cavia_porcellus"),
CERATOTHERIUM_SIMUM_SIMUM(73337,"CerSimSim","ENSCSI","ceratotherium simum simum,ceratotherium_simum_simum,cersim,csim,csimum_simum,rhinoceros"),
CHOLOEPUS_HOFFMANNI(9358, "choHof", "ENSCHO", "Sloth,Two-toed_sloth,Hoffmans_two-fingered_sloth,choloepus_hoffmanni"),
CHLOROCEBUS_SABAEUS(60711,"ChlSab","ENSCSA","chlorocebus_sabaeus,chlorocebus_aethiops_sabaeus,vervet monkey,african green monkey,green monkey"),
CIONA_INTESTINALIS(7719, "KH", "ENSCIN", "cionaintestinalis,ciona_int,ciona_intestinalis"),
CIONA_SAVIGNYI(51511, "CSAV", "ENSCSAV", "savignyi,cionasavignyi,csavignyi,ciona_savignyi"),
CRICETULUS_GRISEUS(10029, "CriGri", "ENSCGR", "hamster,chinese_hamster,cgriseus,cricetulus_griseus"),
CULEX_PIPIENS(7175, "CpiJ", "CPIJ", "culex,culexpipiens,culex_pipiens"),
DANIO_RERIO(7955, "Zv", "ENSDAR", "zebrafish,danio,drerio,daniorerio,danio_rerio"),
DASYPUS_NOVEMCINCTUS(9361, "DasNov", "ENSDNO", "armadillo,daisy,dasypus,nine_banded_armadillo,nine-banded_armadillo,texas_armadillo,dasypus_novemcinctus"),
DIPODOMYS_ORDII(10020, "DIPORD", "ENSDOR", "ords_kangaroo_rat,ordskangaroorat,kangaroo_rat, kangaroorat , dipodomys_ordii"),
DROSOPHILA_ANANASSAE(7217, "dana", "IGNORE", "drosophila,ananassae,drosophilaananassae,drosophila_ananassae,dana"),
DROSOPHILA_GRIMSHAWI(7222, "dgri", "IGNORE", "drosophila,grimshawi,drosophilagrimshawi,drosophila_grimshawi,dgri"),
DROSOPHILA_MELANOGASTER(7227, "BDGP", "IGNORE", "drosophila,dmelongaster,drosophilamelanogaster,drosophila_melanogaster"),
DROSOPHILA_PSEUDOOBSCURA(7237, "BCM-HGSC", "IGNORE", "drosophila,pseudoobscura,drosophilapseudoobscura,drosophila_pseudoobscura,dpse"),
DROSOPHILA_WILLISTONI(7260, "dwil", "IGNORE", "drosophila,willistoni,drosophilawillistonii,drosophila_willistoni,dwil"),
DROSOPHILA_YAKUBA(7245, "dyak", "IGNORE", "drosophila,yakuba,drosophilayakuba,drosophila_yakuba,dyak"),
ECHINOPS_TELFAIRI(9371, "TENREC", "ENSETE", "tenrec,echinops,small_madagascar_hedgehog,lesser_hedgehog_tenrec,echinops_telfairi"),
EQUUS_CABALLUS(9796, "EquCab", "ENSECA", "horse,equus,mr_ed,ecaballus,equus_caballus"),
ERINACEUS_EUROPAEUS(9365, "HEDGEHOG", "ENSEEU", "hedgehog,european_hedgehog,eeuropaeus,erinaceus_europaeus"),
FICEDULA_ALBICOLLIS(59894, "FicAlb", "ENSFAL", "flycatcher,falbicollis,collared_flycatcher,f_albicollis,ficalb"),
FELIS_CATUS(9685, "Felis_catus", "ENSFCA", "cat,fcatus,felis,domestic_cat,felis_catus"),
GADUS_MORHUA(8049, "gadMor", "ENSGMO", "cod,gadus_morhua,gmorhua,atlantic_cod"),
GALLUS_GALLUS(9031, "Galgal", "ENSGAL", "chicken,chick,ggallus,gallusgallus,gallus_gallus"),
GASTEROSTEUS_ACULEATUS(69293, "BROADS", "ENSGAC", "stickleback,gas_aculeatus,gasaculeatus,gasterosteusaculeatus,gasterosteus_aculeatus"),
GORILLA_GORILLA(9595, "gorGor", "ENSGGO", "gorilla,gorilla_gorilla,ggor"),
HETEROCEPHALUS_GLABER(10181, "HetGla", "ENSHGL", "naked_mole_rat,heterocephalus_glaber,hglaber"),
HOMO_SAPIENS(9606, "GRCh", "ENS", "human,hsapiens,homosapiens,homo_sapiens"),
ICTIDOMYS_TRIDECEMLINEATUS(43179, "spetri", "ENSSTO", "squirrel,stridecemlineatus,thirteen-lined_ground_squirrel,ictidomys_tridecemlineatus_arenicola,ictidomys_tridecemlineatus"),
LATIMERIA_CHALUMNAE(7897,"LatCha", "ENSLAC","coelacanth,latimeria_chalumnae,latimeria,l_chalumnae,Latimeria chalumnae"),
LEPISOSTEUS_OCULATUS(7918, "LepOcu","ENSLOC","spotted_gar"),
LOXODONTA_AFRICANA(9785, "LoxAfr", "ENSLAF", "elephant,nelly,loxodonta,african_elephant,african_savannah_elephant,african_bush_elephant,loxodonta_africana"),
MACACA_MULATTA(9544, "MM", "ENSMMU", "macacamulatta,rhesusmacaque,rhesus_macaque,macaque,macaca_mulatta"),
MACROPUS_EUGENII(9315, "Meug", "ENSMEU", "wallaby,tammar_wallaby,macropuseugenii,m_eugenii,tammarwallaby,Macropus eugenii,macropus_eugenii"),
MELEAGRIS_GALLOPAVO(9103, "UMD", "ENSMGA", "turkey,common turkey,wild turkey,meleagris_gallopavo, meleagris_gallopavo"),
MELOPSITTACUS_UNDULATUS(13146, "MelUnd", "ENSMUN", "budgerigar,melopsittacus_undulatus,mundulatus"),
MICROCEBUS_MURINUS(30608, "micMur", "ENSMIC", "mouse_lemur,mouselemur,microcebus,microcebus_murinus"),
MONODELPHIS_DOMESTICA(13616, "BROADO", "ENSMOD", "opossum,monodelphis,mdomestica,mdomesticus,monodelphisdomestica,monodelphisdomesticus,monodelphis_domesticus,monodelphis_domestica"),
MUS_MUSCULUS(10090, "GRCm", "ENSMUS", "mouse,mmusculus,musmusculus,mus_musculus"),
MUSTELA_PUTORIUS_FURO(9669, "MusPutFur", "ENSMPU", "ferret,domestic ferret,Mustela_putorius_furo,Mustela putorius furo"),
MYOTIS_LUCIFUGUS(59463, "Myoluc", "ENSMLU", "microbat,little_brown_bat,mlucifugus,myotis,myotis_lucifugus"),
NOMASCUS_LEUCOGENYS(61853, "NLEU", "ENSNLE","gibbon,nleu,nomleu,nleugogenys,nomascus_leucogenys"),
OCHOTONA_PRINCEPS(9978, "PIKA", "ENSOPR", "pika,Americanpika,American_pika,oprinceps,ochotona,ochotona_princeps"),
OREOCHROMIS_NILOTICUS(8128, "Orenil", "ENSONI", "tilapia,Oreochromis niloticus,oreochromis niloticus,Oreochromis_niloticus,oreochromis_niloticus,Nile tilapia,nile tilapia,Nile_tilapia,nile_tilapia,O. niloticus"),
ORNITHORHYNCHUS_ANATINUS(9258, "OANA", "ENSOAN", "platypus,oanatius,ornithorhynchus_anatinus"),
ORYCTEROPUS_AFER_AFER(1230840,"OryAfe","ENSOAF","aardvark,oafe,oafer_after,oryafe,orycteropus afer afer,orycteropus_afer_afer"),
ORYCTOLAGUS_CUNICULUS(9986, "OryCun", "ENSOCU", "rabbit,oryctolagus,domestic_rabbit,bunny,japanese_white_rabbit,european_rabbit,oryctolagus_cuniculus"),
ORYZIAS_LATIPES(8090, "MEDAKA", "ENSORL", "medaka,oryzias,japanese_medaka,japanese_rice_fish,japanese_ricefish,japanese_killifish,oryzias_latipes"),
OTOLEMUR_GARNETTII(30611, "OtoGar", "ENSOGA", "bushbaby,bush_baby,galago,small_eared_galago,ogarnettii,otolemur,otolemur_garnettii"),
OVIS_ARIES(9940, "Oar", "ENSOAR", "ovis_aries,oaries,oviari,sheep"),
PAN_TROGLODYTES(9598, "CHIMP", "ENSPTR", "chimp,chimpanzee,ptroglodytes,pantroglodytes,pan_troglodytes"),
PAPIO_ANUBIS(9555,"PapAnu", "ENSPAN", "papio_anubis"),
PAPIO_HAMADRYAS(9557, "Pham", "ENSPHA", "baboon,Papio_hamadryas,papio_hamadryas,papio_hamadryas,sacred_baboon,western_baboon,red_baboon"),
PELODISCUS_SINENSIS(13735, "PelSin", "ENSPSI", "Chinese_softshell_turtle,turtle,softshell_turtle,Trionyx_sinensis"),
PETROMYZON_MARINUS(7757, "Pmarinus", "ENSPMA", "lamprey,sea_lamprey,pmarinus,petromyzon,petromyzon_marinus"),
POECILIA_FORMOSA(48698, "PoeFor", "ENSPFO", "amazon molly,poecilia_formosa,pformosa,poefor,pfor"),
PHYSETER_MACROCEPHALUS(9755,"PhyMac", "ENSPMC", "physeter_macrocephalus, sperm whale, pmac, pmacrocephalus, physeter macrocephalus, phymac"),
PONGO_ABELII(9601, "PPYG", "ENSPPY", "orangutan,orang-utan,pabellii,pongo_abelii"),
PROCAVIA_CAPENSIS(9813, "PROCAP", "ENSPCA", "cape_rock_hyrax,caperockhyrax,procaviacapensis,procavia_capensis"),
PTEROPUS_VAMPYRUS(132908, "PTEVAM", "ENSPVA", "large_flying_fox,largeflyingfox,pteropusvampyrus,pteropus_vampyrus"),
RATTUS_NORVEGICUS(10116, "Rnor", "ENSRNO", "rat,rnovegicus,rattusnorvegicus,rattus_norvegicus"),
SACCHAROMYCES_CEREVISIAE(4932, "R", "IGNORE", "yeast,saccharomyces,scerevisiae,saccharomycescerevisiae,saccharomyces_cerevisiae"),
SAIMIRI_BOLIVIENSIS(39432, "SaiBol", "ENSSBO", "saimiri_boliviensis,sboliviensis,squirrel_monkey,bolivian_squirrel_monkey,squirrelmonkey"),
SARCOPHILUS_HARRISII(9305, "devil", "ENSSHA", "devil,Sarcophilus_harrisii,sarcophilus_harrisii,tasmanian_devil,taz"),
SOREX_ARANEUS(42254, "COMMON_SHREW", "ENSSAR", "shrew,common_shrew,commonShrew,european_shrew,saraneus,sorex,sorex_araneus"),
SUS_SCROFA(9823, "Sscrofa", "ENSSSC", "pig,boar,wildboar,wild_boar,susscrofa,sus_scrofa"),
TAENIOPYGIA_GUTTATA(59729, "taeGut", "ENSTGU", "zebrafinch,zebra_finch,taeniopygia_guttata,taeniopygiaguttata,tguttata,poephila_guttata,taenopygia_guttata"),
TAKIFUGU_RUBRIPES(31033, "FUGU", "ENSTRU", "pufferfish,fugu,frubripes,fugurubripes,fugu_rubripes,takifugu,trubripes,takifugurubripes,takifugu_rubripes"),
TARSIUS_SYRICHTA(9478, "TARSYR", "ENSTSY", "philippine_tarsier,philippinetarsier,tarsiussyrichta,tarsius_syrichta"),
TETRAODON_NIGROVIRIDIS(99883, "TETRAODON", "IGNORE", "tetraodon,tnigroviridis,tetraodonnigroviridis,tetraodon_nigroviridis"),
TUPAIA_BELANGERI(37347, "TREESHREW", "ENSTBE", "treeshrew,tbelangeri,northern_tree_shrew,common_tree_shrew,tupaia_belangeri"),
TURSIOPS_TRUNCATUS(9739, "TURTRU", "ENSTTR", "bottlenosed_dolphin,dolphin,tursiopstruncatus,tursiops_truncatus"),
VICUGNA_PACOS(30538, "VICPAC", "ENSVPA", "alpaca,vicugnapacos,vicugna_pacos"),
XENOPUS_TROPICALIS(8364, "JGI", "ENSXET", "pipid,pipidfrog,xenopus,xtropicalis,xenopustropicalis,xenopus_tropicalis"),
XIPHOPHORUS_MACULATUS(8083, "Xipmac", "ENSXMA", "xiphophorous_maculatus,platyfish,southern_platyfish"),
// MASTER_SCHEMA(0, "", "", "master_schema,masterschema,schema"),
HEALTHCHECK(0, "", "", ""),
HELP(0, "", "", ""),
NCBI_TAXONOMY(0, "", "", ""),
SYSTEM(0, "", "", ""),
ENSEMBL_WEBSITE(0, "", "", ""),
UNKNOWN(0, "", "", ""),
ANCESTRAL_SEQUENCES(0, "", "", "ancestral,ancestor");
// Taxonomy IDs - see ensembl-compara/sql/taxon.txt
private static Map<Integer, Species> taxonIDToSpecies = new HashMap<Integer, Species>();
private static Map<String, Species> assemblyPrefixToSpecies = new HashMap<String, Species>();
private static Map<Species, String> vegaStableIDPrefix = new EnumMap<Species, String>(Species.class);
private static Logger logger = Logger.getLogger("HealthCheckLogger");
// populate the hash tables
static {
for (Species s : values()) {
taxonIDToSpecies.put(s.getTaxonID(), s);
assemblyPrefixToSpecies.put(s.getAssemblyPrefix(), s);
// we have to add to the Vega hash the 4 species with Vega annotation
switch (s) {
case HOMO_SAPIENS:
vegaStableIDPrefix.put(Species.HOMO_SAPIENS, "OTTHUM");
break;
case MUS_MUSCULUS:
vegaStableIDPrefix.put(Species.MUS_MUSCULUS, "OTTMUS");
break;
case CANIS_FAMILIARIS:
vegaStableIDPrefix.put(Species.CANIS_FAMILIARIS, "OTTCAN");
break;
case DANIO_RERIO:
vegaStableIDPrefix.put(Species.DANIO_RERIO, "OTTDAR");
break;
}
}
}
private final int taxonID;
private final String assemblyPrefix;
private final String stableIDPrefix;
private final String alias;
private final Set<String> aliasSet;
private Species(int tax_id, String assembly, String stableID, String alias) {
this.taxonID = tax_id;
this.assemblyPrefix = assembly;
this.stableIDPrefix = stableID;
this.alias = alias;
        //Build a hash set of lowercased aliases rather than splitting the alias string every time
Set<String> aliasSet = new HashSet<String>();
for(String a: alias.split(",")) {
aliasSet.add(a.toLowerCase().trim());
}
aliasSet.add(this.name().toLowerCase());
this.aliasSet = aliasSet;
}
// getters for the properties
public int getTaxonID() {
return taxonID;
};
public String getAssemblyPrefix() {
return assemblyPrefix;
};
public String getStableIDPrefix() {
return stableIDPrefix;
};
public String getAlias() {
return alias;
};
    // methods to maintain backwards compatibility
// -----------------------------------------------------------------
/**
* Resolve an alias to a Species object.
*
* @param speciesAlias
* The alias (e.g. human, homosapiens, hsapiens)
* @return The species object corresponding to alias, or Species.UNKNOWN if it cannot be resolved.
*/
public static Species resolveAlias(String speciesAlias) {
String alias = speciesAlias.toLowerCase();
// --------------------------------------
for (Species s : values()) {
if(s.aliasSet.contains(alias)) {
return s;
}
}
return Species.UNKNOWN;
}
// -----------------------------------------------------------------
/**
* Get the taxonomy ID associated with a particular species.
*
* @param s
* The species to look up.
* @return The taxonomy ID associated with s, or "" if none is found.
*/
public static String getTaxonomyID(Species s) {
String result = "";
result = Integer.toString(s.getTaxonID());
return result;
}
// -----------------------------------------------------------------
/**
* Get the species associated with a particular taxonomy ID.
*
     * @param taxon
     *            The taxonomy ID to look up.
     * @return The species associated with taxon, or Species.UNKNOWN if none is found.
*/
public static Species getSpeciesFromTaxonomyID(int taxon) {
Species result = UNKNOWN;
if (taxonIDToSpecies.containsKey(taxon)) {
result = (Species) taxonIDToSpecies.get(taxon);
} else {
logger.warning("Cannot get species for taxonomy ID " + taxon + " returning Species.UNKNOWN");
}
return result;
}
// -------------------------------------------------------------------------
/**
* Return a Species object corresponding to a particular assembly prefix.
*
* @param prefix
* The assembly prefix.
*
* @return The Species corresponding to prefix, or Species.UNKNOWN.
*/
public static Species getSpeciesForAssemblyPrefix(String prefix) {
Species result = Species.UNKNOWN;
if (assemblyPrefixToSpecies.containsKey(prefix)) {
result = (Species) assemblyPrefixToSpecies.get(prefix);
} else {
result = Species.UNKNOWN;
}
return result;
}
// -------------------------------------------------------------------------
/**
* Get the assembly prefix for a species.
*
* @param s
* The species.
* @return The assembly prefix for s.
*/
public static String getAssemblyPrefixForSpecies(Species s) {
return (String) s.getAssemblyPrefix();
}
// -------------------------------------------------------------------------
/**
* Get the stable ID prefix for a species.
*
* @param s
* The species.
* @param t
* The type of database.
* @return The stable ID prefix for s. Note "IGNORE" is used for imported species.
*/
public static String getStableIDPrefixForSpecies(Species s, DatabaseType t) {
String result = "";
if (t.equals(DatabaseType.CORE)) {
result = (String) s.getStableIDPrefix();
} else if (t.equals(DatabaseType.VEGA)) {
result = (String) vegaStableIDPrefix.get(s);
}
if (result == null || result.equals("")) {
logger.warning("Can't get stable ID prefix for " + s.toString() + " " + t.toString() + " database");
}
return result;
}
// -------------------------------------------------------------------------
/**
* Get the BioMart table root for a species (e.g. hsapiens, mmusculus)
*/
public String getBioMartRoot() {
String[] bits = this.name().toLowerCase().split("_");
return bits.length > 1 ? bits[0].substring(0, 1) + bits[1] : "";
}
public String toString() {
return this.name().toLowerCase();
}
// -----------------------------------------------------------------
}
|
Changed prefix for zebrafish from zv to GRCz
|
src/org/ensembl/healthcheck/Species.java
|
Changed prefix for zebrafish from zv to GRCz
|
|
Java
|
apache-2.0
|
439693b527036f1b344a214e8ce60e5e4957a5f3
| 0
|
JerJohn15/concourse,remiemalik/concourse,bigtreeljc/concourse,aabdin01/concourse,hcuffy/concourse,remiemalik/concourse,cinchapi/concourse,dubex/concourse,aabdin01/concourse,remiemalik/concourse,chiranjeevjain/concourse,karthikprabhu17/concourse,vrnithinkumar/concourse,remiemalik/concourse,MattiasZurkovic/concourse,mAzurkovic/concourse,prateek135/concourse,kylycht/concourse,prateek135/concourse,JerJohn15/concourse,chiranjeevjain/concourse,JerJohn15/concourse,kylycht/concourse,Qunzer/concourse,cinchapi/concourse,cinchapi/concourse,mAzurkovic/concourse,MattiasZurkovic/concourse,mAzurkovic/concourse,mAzurkovic/concourse,hcuffy/concourse,Qunzer/concourse,dubex/concourse,savanibharat/concourse,JerJohn15/concourse,dubex/concourse,dubex/concourse,remiemalik/concourse,karthikprabhu17/concourse,vrnithinkumar/concourse,chiranjeevjain/concourse,karthikprabhu17/concourse,bigtreeljc/concourse,mAzurkovic/concourse,vrnithinkumar/concourse,JerJohn15/concourse,vrnithinkumar/concourse,bigtreeljc/concourse,MattiasZurkovic/concourse,MattiasZurkovic/concourse,karthikprabhu17/concourse,JerJohn15/concourse,bigtreeljc/concourse,bigtreeljc/concourse,cinchapi/concourse,prateek135/concourse,dubex/concourse,savanibharat/concourse,Qunzer/concourse,remiemalik/concourse,savanibharat/concourse,aabdin01/concourse,vrnithinkumar/concourse,kylycht/concourse,aabdin01/concourse,aabdin01/concourse,savanibharat/concourse,prateek135/concourse,dubex/concourse,hcuffy/concourse,savanibharat/concourse,prateek135/concourse,cinchapi/concourse,cinchapi/concourse,Qunzer/concourse,hcuffy/concourse,Qunzer/concourse,chiranjeevjain/concourse,chiranjeevjain/concourse,bigtreeljc/concourse,chiranjeevjain/concourse,kylycht/concourse,Qunzer/concourse,MattiasZurkovic/concourse,kylycht/concourse,karthikprabhu17/concourse,prateek135/concourse,MattiasZurkovic/concourse,kylycht/concourse,karthikprabhu17/concourse,savanibharat/concourse,aabdin01/concourse,hcuffy/concourse,vrnithinkumar/concourse,mAzurkovic/concourse,hcuffy/concourse
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2013-2014 Jeff Nelson, Cinchapi Software Collective
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.cinchapi.concourse.server.storage.db;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
import org.cinchapi.concourse.annotate.PackagePrivate;
import org.cinchapi.concourse.server.io.Byteable;
import org.cinchapi.concourse.server.model.PrimaryKey;
import org.cinchapi.concourse.server.model.Text;
import org.cinchapi.concourse.server.model.Value;
import org.cinchapi.concourse.server.storage.Action;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* A wrapper around a collection of Revisions that provides in-memory indices to
* allow efficient reads. All the Revisions in the Record must have the same
* locator. They must also have the same key if the Revision is partial.
*
* @author jnelson
* @param <L> - the locator type
* @param <K> - the key type
* @param <V> - value type
*/
@PackagePrivate
@ThreadSafe
@SuppressWarnings("unchecked")
abstract class Record<L extends Byteable & Comparable<L>, K extends Byteable & Comparable<K>, V extends Byteable & Comparable<V>> {
/**
     * Return a PrimaryRecord for {@code record}.
     * 
     * @param record
     * @return the PrimaryRecord
*/
public static PrimaryRecord createPrimaryRecord(PrimaryKey record) {
return new PrimaryRecord(record, null);
}
/**
     * Return a partial PrimaryRecord for {@code key} in {@code record}.
     * 
     * @param record
* @param key
* @return the PrimaryRecord.
*/
public static PrimaryRecord createPrimaryRecordPartial(PrimaryKey record,
Text key) {
return new PrimaryRecord(record, key);
}
/**
* Return a SearchRecord for {@code key}.
*
* @param key
* @return the SearchRecord
*/
public static SearchRecord createSearchRecord(Text key) {
return new SearchRecord(key, null);
}
/**
* Return a partial SearchRecord for {@code term} in {@code key}.
*
* @param key
* @param term
* @return the partial SearchRecord
*/
public static SearchRecord createSearchRecordPartial(Text key, Text term) {
return new SearchRecord(key, term);
}
/**
     * Return a SecondaryRecord for {@code key}.
*
* @param key
* @return the SecondaryRecord
*/
public static SecondaryRecord createSecondaryRecord(Text key) {
return new SecondaryRecord(key, null);
}
/**
* Return a partial SecondaryRecord for {@code value} in {@code key}.
*
* @param key
* @param value
* @return the SecondaryRecord
*/
public static SecondaryRecord createSecondaryRecordPartial(Text key,
Value value) {
return new SecondaryRecord(key, value);
}
/**
* The master lock for {@link #write} and {@link #read}. DO NOT use this
* lock directly.
*/
private final ReentrantReadWriteLock master = new ReentrantReadWriteLock();
/**
* An exclusive lock that permits only one writer and no reader. Use this
* lock to ensure that no read occurs while data is being appended to the
* Record.
*/
private final WriteLock write = master.writeLock();
/**
* A shared lock that permits many readers and no writer. Use this lock to
* ensure that no data append occurs while a read is happening within the
* Record.
*/
protected final ReadLock read = master.readLock();
/**
* The index is used to efficiently determine the set of values currently
* mapped from a key. The subclass should specify the appropriate type of
* key sorting via the returned type for {@link #mapType()}.
*/
protected final transient Map<K, Set<V>> present = mapType();
/**
* This index is used to efficiently handle historical reads. Given a
     * revision (e.g. a key/value pair) and a historical timestamp, we can count the
     * number of times that the value appears <em>beforehand</em> and determine
     * whether the mapping existed or not.
*/
protected final transient HashMap<K, List<Revision<L, K, V>>> history = Maps
.newHashMap();
/**
* The version of the Record's most recently appended {@link Revision}.
*/
private transient long version = 0;
/**
* The locator used to identify this Record.
*/
private final L locator;
/**
* The key used to identify this Record. This value is {@code null} unless
* {@link #partial} equals {@code true}.
*/
@Nullable
private final K key;
/**
* Indicates that this Record is partial and only contains Revisions for a
* specific {@link #key}.
*/
private final boolean partial;
/**
* This set is returned when a key does not map to any values so that the
* caller can transparently interact without performing checks or
     * compromising data consistency. This is a member variable (as opposed to a
     * static constant) that is created per instance because it has a
* generic type argument.
*/
private final Set<V> emptyValues = new EmptyValueSet();
/**
* Construct a new instance.
*
* @param locator
* @param key
*/
protected Record(L locator, @Nullable K key) {
this.locator = locator;
this.key = key;
        this.partial = key != null;
}
/**
* Append {@code revision} to the record by updating the in-memory indices.
* The {@code revision} must have:
* <ul>
* <li>a higher version than that of this Record</li>
* <li>a locator equal to that of this Record</li>
* <li>a key equal to that of this Record if this Record is partial</li>
* </ul>
*
* @param revision
*/
public void append(Revision<L, K, V> revision) {
write.lock();
try {
// NOTE: We only need to enforce the monotonic increasing constraint
// for PrimaryRecords because Secondary and Search records will be
// populated from Blocks that were sorted based primarily on
// non-version factors.
Preconditions
.checkArgument((this instanceof PrimaryRecord && revision
.getVersion() >= version) || true, "Cannot "
+ "append %s because its version(%s) is lower "
+ "than the Record's current version(%s). The",
revision, revision.getVersion(), version);
Preconditions.checkArgument(revision.getLocator().equals(locator),
"Cannot append %s because it does not belong to %s",
revision, this);
// NOTE: The check below is ignored for a partial SearchRecord
            // instance because the 'key' is the entire search query, but we
// append Revisions for each term in the query
Preconditions.checkArgument(
(partial && revision.getKey().equals(key)) || !partial
|| this instanceof SearchRecord,
"Cannot append %s because it does not belong to %s",
revision, this);
// NOTE: The check below is ignored for a SearchRecord instance
// because it will legitimately appear that "duplicate" data has
// been added if similar data is added to the same key in a record
            // at different times (e.g. adding John Doe and Johnny Doe to the
            // "name" key)
Preconditions.checkArgument(this instanceof SearchRecord
|| isOffset(revision), "Cannot append "
+ "%s because it represents an action "
+ "involving a key, value and locator that has not "
+ "been offset.", revision);
// Update present index
Set<V> values = present.get(revision.getKey());
if(values == null) {
values = Sets.<V> newLinkedHashSet();
present.put(revision.getKey(), values);
}
if(revision.getType() == Action.ADD) {
values.add(revision.getValue());
}
else {
values.remove(revision.getValue());
if(values.isEmpty()) {
present.remove(revision.getKey());
}
}
// Update history index
List<Revision<L, K, V>> revisions = history.get(revision.getKey());
if(revisions == null) {
revisions = Lists.newArrayList();
history.put(revision.getKey(), revisions);
}
revisions.add(revision);
// Update metadata
version = Math.max(version, revision.getVersion());
}
finally {
write.unlock();
}
}
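    // Editor's note: once the preconditions above pass, append() is mostly
    // multimap style bookkeeping on the "present" index plus an append to the
    // history list. A hypothetical standalone sketch of that bookkeeping (not
    // part of the original class), without the locking and version handling:
    private static <A, B> void applyToIndexSketch(Map<A, Set<B>> index, A key, B value,
            Action action) {
        Set<B> values = index.get(key);
        if(values == null) {
            values = Sets.<B> newLinkedHashSet();
            index.put(key, values);
        }
        if(action == Action.ADD) {
            values.add(value);
        }
        else {
            values.remove(value);
            if(values.isEmpty()) {
                index.remove(key); // drop empty mappings, mirroring append()
            }
        }
    }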
@Override
public boolean equals(Object obj) {
if(obj.getClass() == this.getClass()) {
Record<L, K, V> other = (Record<L, K, V>) obj;
return locator.equals(other.locator)
&& (partial ? key.equals(other.key) : true);
}
return false;
}
/**
* Return the Record's version, which is equal to the largest version of an
* appended Revision.
*
* @return the version
*/
public long getVersion() {
return version;
}
@Override
public int hashCode() {
return partial ? Objects.hash(locator, key) : locator.hashCode();
}
/**
* Return {@code true} if this record is partial.
*
* @return {@link #partial}
*/
public boolean isPartial() {
return partial;
}
@Override
public String toString() {
return getClass().getSimpleName() + " " + (partial ? key + " IN " : "")
+ locator;
}
/**
* Lazily retrieve an unmodifiable view of the current set of values mapped
* from {@code key}.
*
* @param key
* @return the set of mapped values for {@code key}
*/
protected Set<V> get(K key) {
read.lock();
try {
return present.containsKey(key) ? Collections
.unmodifiableSet(present.get(key)) : emptyValues;
}
finally {
read.unlock();
}
}
/**
* Lazily retrieve the historical set of values for {@code key} at
* {@code timestamp}.
*
* @param key
* @param timestamp
* @return the set of mapped values for {@code key} at {@code timestamp}.
*/
protected Set<V> get(K key, long timestamp) {
read.lock();
try {
Set<V> values = emptyValues;
if(history.containsKey(key)) {
values = Sets.newLinkedHashSet();
Iterator<Revision<L, K, V>> it = history.get(key).iterator();
while (it.hasNext()) {
Revision<L, K, V> revision = it.next();
if(revision.getVersion() <= timestamp) {
if(revision.getType() == Action.ADD) {
values.add(revision.getValue());
}
else {
values.remove(revision.getValue());
}
}
else {
break;
}
}
}
return values;
}
finally {
read.unlock();
}
}
/**
* Initialize the appropriate data structure for the {@link #present}.
*
* @return the initialized mappings
*/
protected abstract Map<K, Set<V>> mapType();
/**
* Return {@code true} if the action associated with {@code revision}
* offsets the last action for an equal revision.
*
* @param revision
* @return {@code true} if the revision is offset.
*/
private boolean isOffset(Revision<L, K, V> revision) {
return (revision.getType() == Action.ADD && !get(revision.getKey())
.contains(revision.getValue()))
|| (revision.getType() == Action.REMOVE && get(
revision.getKey()).contains(revision.getValue()));
}
/**
* An empty Set of type V that cannot be modified, but won't throw
* exceptions. This is returned in instances when a key does not map to any
* values so that the caller can interact with the Set normally without
* performing validity checks and while preserving data consistency.
*
* @author jnelson
*/
private final class EmptyValueSet implements Set<V> {
@Override
public boolean add(V e) {
return false;
}
@Override
public boolean addAll(Collection<? extends V> c) {
return false;
}
@Override
public void clear() {}
@Override
public boolean contains(Object o) {
return false;
}
@Override
public boolean containsAll(Collection<?> c) {
return false;
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public Iterator<V> iterator() {
return Collections.emptyIterator();
}
@Override
public boolean remove(Object o) {
return false;
}
@Override
public boolean removeAll(Collection<?> c) {
return false;
}
@Override
public boolean retainAll(Collection<?> c) {
return false;
}
@Override
public int size() {
return 0;
}
@Override
public Object[] toArray() {
return null;
}
@Override
public <T> T[] toArray(T[] a) {
return null;
}
}
}
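The Record class above keeps two in-memory indices: a "present" map that answers current reads and a per-key "history" list that is replayed for historical reads. The standalone sketch below (plain java.util collections and hypothetical names such as RecordIndexSketch and Rev, not Concourse's actual API; requires Java 16+ for records) illustrates the same ADD/REMOVE replay logic.

import java.util.*;

// Minimal sketch of the present/history indexing idea: each revision updates a
// live "present" set and is also appended to a per-key history; a historical
// read replays the history up to the requested timestamp.
public class RecordIndexSketch {
    record Rev(String key, String value, boolean add, long version) {}

    private final Map<String, Set<String>> present = new HashMap<>();
    private final Map<String, List<Rev>> history = new HashMap<>();

    void append(Rev rev) {
        Set<String> values = present.computeIfAbsent(rev.key(), k -> new LinkedHashSet<>());
        if (rev.add()) {
            values.add(rev.value());
        }
        else {
            values.remove(rev.value());
            if (values.isEmpty()) {
                present.remove(rev.key());
            }
        }
        history.computeIfAbsent(rev.key(), k -> new ArrayList<>()).add(rev);
    }

    Set<String> get(String key, long timestamp) {
        Set<String> values = new LinkedHashSet<>();
        for (Rev rev : history.getOrDefault(key, List.of())) {
            if (rev.version() > timestamp) {
                break; // assumes revisions were appended in version order
            }
            if (rev.add()) {
                values.add(rev.value());
            }
            else {
                values.remove(rev.value());
            }
        }
        return values;
    }

    public static void main(String[] args) {
        RecordIndexSketch r = new RecordIndexSketch();
        r.append(new Rev("name", "John Doe", true, 1));
        r.append(new Rev("name", "John Doe", false, 2));
        r.append(new Rev("name", "Johnny Doe", true, 3));
        System.out.println(r.get("name", 1)); // [John Doe]
        System.out.println(r.get("name", 3)); // [Johnny Doe]
    }
}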
|
concourse-server/src/main/java/org/cinchapi/concourse/server/storage/db/Record.java
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2013-2014 Jeff Nelson, Cinchapi Software Collective
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.cinchapi.concourse.server.storage.db;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
import org.cinchapi.concourse.annotate.PackagePrivate;
import org.cinchapi.concourse.server.io.Byteable;
import org.cinchapi.concourse.server.model.PrimaryKey;
import org.cinchapi.concourse.server.model.Text;
import org.cinchapi.concourse.server.model.Value;
import org.cinchapi.concourse.server.storage.Action;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* A wrapper around a collection of Revisions that provides in-memory indices to
* allow efficient reads. All the Revisions in the Record must have the same
* locator. They must also have the same key if the Revision is partial.
*
* @author jnelson
* @param <L> - the locator type
* @param <K> - the key type
* @param <V> - value type
*/
@PackagePrivate
@ThreadSafe
@SuppressWarnings("unchecked")
abstract class Record<L extends Byteable & Comparable<L>, K extends Byteable & Comparable<K>, V extends Byteable & Comparable<V>> {
/**
* Return a PrimaryRecord for {@code primaryKey}.
*
* @param primaryKey
* @return the PrimaryRecord
*/
public static PrimaryRecord createPrimaryRecord(PrimaryKey record) {
return new PrimaryRecord(record, null);
}
/**
* Return a partial PrimaryRecord for {@code key} in {@code record}.
*
* @param primaryKey
* @param key
* @return the PrimaryRecord.
*/
public static PrimaryRecord createPrimaryRecordPartial(PrimaryKey record,
Text key) {
return new PrimaryRecord(record, key);
}
/**
* Return a SearchRecord for {@code key}.
*
* @param key
* @return the SearchRecord
*/
public static SearchRecord createSearchRecord(Text key) {
return new SearchRecord(key, null);
}
/**
* Return a partial SearchRecord for {@code term} in {@code key}.
*
* @param key
* @param term
* @return the partial SearchRecord
*/
public static SearchRecord createSearchRecordPartial(Text key, Text term) {
return new SearchRecord(key, term);
}
/**
* Return a SecondaryRecord for {@code key}.
*
* @param key
* @return the SecondaryRecord
*/
public static SecondaryRecord createSecondaryRecord(Text key) {
return new SecondaryRecord(key, null);
}
/**
* Return a partial SecondaryRecord for {@code value} in {@code key}.
*
* @param key
* @param value
* @return the SecondaryRecord
*/
public static SecondaryRecord createSecondaryRecordPartial(Text key,
Value value) {
return new SecondaryRecord(key, value);
}
/**
* The master lock for {@link #write} and {@link #read}. DO NOT use this
* lock directly.
*/
private final ReentrantReadWriteLock master = new ReentrantReadWriteLock();
/**
* An exclusive lock that permits only one writer and no reader. Use this
* lock to ensure that no read occurs while data is being appended to the
* Record.
*/
private final WriteLock write = master.writeLock();
/**
* A shared lock that permits many readers and no writer. Use this lock to
* ensure that no data append occurs while a read is happening within the
* Record.
*/
protected final ReadLock read = master.readLock();
/**
* The index is used to efficiently determine the set of values currently
* mapped from a key. The subclass should specify the appropriate type of
* key sorting via the returned type for {@link #mapType()}.
*/
protected final transient Map<K, Set<V>> present = mapType();
/**
* This index is used to efficiently handle historical reads. Given a
* revision (e.g. key/value pair) and a historical timestamp, we can count the
* number of times that the value appears <em>beforehand</em> to determine
* if the mapping existed or not.
*/
protected final transient HashMap<K, List<Revision<L, K, V>>> history = Maps
.newHashMap();
/**
* The version of the Record's most recently appended {@link Revision}.
*/
private transient long version = 0;
/**
* The locator used to identify this Record.
*/
private final L locator;
/**
* The key used to identify this Record. This value is {@code null} unless
* {@link #partial} equals {@code true}.
*/
@Nullable
private final K key;
/**
* Indicates that this Record is partial and only contains Revisions for a
* specific {@link #key}.
*/
private final boolean partial;
/**
* This set is returned when a key does not map to any values so that the
* caller can transparently interact without performing checks or
* compromising data consistency. This is a member variable (as opposed to a
* static constant) that is instantiated in the constructor because it has a
* generic type argument.
*/
private final Set<V> emptyValues = new EmptyValueSet();
/**
* Construct a new instance.
*
* @param locator
* @param key
*/
protected Record(L locator, @Nullable K key) {
this.locator = locator;
this.key = key;
this.partial = key == null ? false : true;
}
/**
* Append {@code revision} to the record by updating the in-memory indices.
* The {@code revision} must have:
* <ul>
* <li>a higher version than that of this Record</li>
* <li>a locator equal to that of this Record</li>
* <li>a key equal to that of this Record if this Record is partial</li>
* </ul>
*
* @param revision
*/
public void append(Revision<L, K, V> revision) {
write.lock();
try {
// NOTE: We only need to enforce the monotonic increasing constraint
// for PrimaryRecords because Secondary and Search records will be
// populated from Blocks that were sorted based primarily on
// non-version factors.
Preconditions
.checkArgument((this instanceof PrimaryRecord && revision
.getVersion() >= version) || true, "Cannot "
+ "append %s because its version(%s) is lower "
+ "than the Record's current version(%s). The",
revision, revision.getVersion(), version);
Preconditions.checkArgument(revision.getLocator().equals(locator),
"Cannot append %s because it does not belong "
+ "to this Record", revision);
// NOTE: The check below is ignored for a partial SearchRecord
// instance because the 'key' is the entire search query, but we
// append Revisions for each term in the query
Preconditions.checkArgument(
(partial && revision.getKey().equals(key)) || !partial
|| this instanceof SearchRecord,
"Cannot append %s because it does not "
+ "belong to This Record", revision);
// NOTE: The check below is ignored for a SearchRecord instance
// because it will legitimately appear that "duplicate" data has
// been added if similar data is added to the same key in a record
// at different times (i.e. adding John Doe and Johnny Doe to the
// "name")
Preconditions.checkArgument(this instanceof SearchRecord
|| isOffset(revision), "Cannot append "
+ "%s because it represents an action "
+ "involving a key, value and locator that has not "
+ "been offset.", revision);
// Update present index
Set<V> values = present.get(revision.getKey());
if(values == null) {
values = Sets.<V> newLinkedHashSet();
present.put(revision.getKey(), values);
}
if(revision.getType() == Action.ADD) {
values.add(revision.getValue());
}
else {
values.remove(revision.getValue());
if(values.isEmpty()) {
present.remove(revision.getKey());
}
}
// Update history index
List<Revision<L, K, V>> revisions = history.get(revision.getKey());
if(revisions == null) {
revisions = Lists.newArrayList();
history.put(revision.getKey(), revisions);
}
revisions.add(revision);
// Update metadata
version = Math.max(version, revision.getVersion());
}
finally {
write.unlock();
}
}
@Override
public boolean equals(Object obj) {
if(obj.getClass() == this.getClass()) {
Record<L, K, V> other = (Record<L, K, V>) obj;
return locator.equals(other.locator)
&& (partial ? key.equals(other.key) : true);
}
return false;
}
/**
* Return the Record's version, which is equal to the largest version of an
* appended Revision.
*
* @return the version
*/
public long getVersion() {
return version;
}
@Override
public int hashCode() {
return partial ? Objects.hash(locator, key) : locator.hashCode();
}
/**
* Return {@code true} if this record is partial.
*
* @return {@link #partial}
*/
public boolean isPartial() {
return partial;
}
@Override
public String toString() {
return getClass().getSimpleName() + " " + (partial ? key + " IN " : "")
+ locator;
}
/**
* Lazily retrieve an unmodifiable view of the current set of values mapped
* from {@code key}.
*
* @param key
* @return the set of mapped values for {@code key}
*/
protected Set<V> get(K key) {
read.lock();
try {
return present.containsKey(key) ? Collections
.unmodifiableSet(present.get(key)) : emptyValues;
}
finally {
read.unlock();
}
}
/**
* Lazily retrieve the historical set of values for {@code key} at
* {@code timestamp}.
*
* @param key
* @param timestamp
* @return the set of mapped values for {@code key} at {@code timestamp}.
*/
protected Set<V> get(K key, long timestamp) {
read.lock();
try {
Set<V> values = emptyValues;
if(history.containsKey(key)) {
values = Sets.newLinkedHashSet();
Iterator<Revision<L, K, V>> it = history.get(key).iterator();
while (it.hasNext()) {
Revision<L, K, V> revision = it.next();
if(revision.getVersion() <= timestamp) {
if(revision.getType() == Action.ADD) {
values.add(revision.getValue());
}
else {
values.remove(revision.getValue());
}
}
else {
break;
}
}
}
return values;
}
finally {
read.unlock();
}
}
/**
* Initialize the appropriate data structure for the {@link #present}.
*
* @return the initialized mappings
*/
protected abstract Map<K, Set<V>> mapType();
/**
* Return {@code true} if the action associated with {@code revision}
* offsets the last action for an equal revision.
*
* @param revision
* @return {@code true} if the revision is offset.
*/
private boolean isOffset(Revision<L, K, V> revision) {
return (revision.getType() == Action.ADD && !get(revision.getKey())
.contains(revision.getValue()))
|| (revision.getType() == Action.REMOVE && get(
revision.getKey()).contains(revision.getValue()));
}
/**
* An empty Set of type V that cannot be modified, but won't throw
* exceptions. This is returned in instances when a key does not map to any
* values so that the caller can interact with the Set normally without
* performing validity checks and while preserving data consistency.
*
* @author jnelson
*/
private final class EmptyValueSet implements Set<V> {
@Override
public boolean add(V e) {
return false;
}
@Override
public boolean addAll(Collection<? extends V> c) {
return false;
}
@Override
public void clear() {}
@Override
public boolean contains(Object o) {
return false;
}
@Override
public boolean containsAll(Collection<?> c) {
return false;
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public Iterator<V> iterator() {
return Collections.emptyIterator();
}
@Override
public boolean remove(Object o) {
return false;
}
@Override
public boolean removeAll(Collection<?> c) {
return false;
}
@Override
public boolean retainAll(Collection<?> c) {
return false;
}
@Override
public int size() {
return 0;
}
@Override
public Object[] toArray() {
return null;
}
@Override
public <T> T[] toArray(T[] a) {
return null;
}
}
}
|
improve error message for easier debugging
|
concourse-server/src/main/java/org/cinchapi/concourse/server/storage/db/Record.java
|
improve error message for easier debugging
|
|
Java
|
apache-2.0
|
ff4adbc76dc6df9d5d5b337f3512bc35cc683c5f
| 0
|
henrichg/PhoneProfiles
|
package sk.henrichg.phoneprofiles;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.support.v4.app.NotificationCompat;
public class ImportantInfoNotification {
public static final int VERSION_CODE_FOR_NEWS = 9999; // news off
public static final int API_LEVEL_FOR_NEWS = 14; //21;
static public void showInfoNotification(Context context) {
PackageInfo pinfo = null;
int versionCode = 0;
try {
pinfo = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
versionCode = pinfo.versionCode;
if (versionCode > GlobalData.getShowInfoNotificationOnStartVersion(context)) {
boolean show = (versionCode >= VERSION_CODE_FOR_NEWS) &&
(android.os.Build.VERSION.SDK_INT >= API_LEVEL_FOR_NEWS);
GlobalData.setShowInfoNotificationOnStart(context, show, versionCode);
}
else
GlobalData.setShowInfoNotificationOnStartVersion(context, versionCode);
} catch (PackageManager.NameNotFoundException e) {
//e.printStackTrace();
}
if (GlobalData.getShowInfoNotificationOnStart(context, versionCode)) {
showNotificationForUnlinkRingerNotificationVolumes(context,
context.getString(R.string.info_notification_title),
context.getString(R.string.info_notification_text));
GlobalData.setShowInfoNotificationOnStart(context, false, versionCode);
}
}
static private void showNotificationForUnlinkRingerNotificationVolumes(Context context, String title, String text) {
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(context)
.setSmallIcon(R.drawable.ic_pphelper_upgrade_notify) // notification icon
.setContentTitle(title) // title for notification
.setContentText(context.getString(R.string.app_name) + ": " + text) // message for notification
.setAutoCancel(true); // clear notification after click
Intent intent = new Intent(context, ImportantInfoActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
PendingIntent pi = PendingIntent.getActivity(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
mBuilder.setContentIntent(pi);
if (android.os.Build.VERSION.SDK_INT >= 16)
mBuilder.setPriority(Notification.PRIORITY_MAX);
if (android.os.Build.VERSION.SDK_INT >= 21)
{
mBuilder.setCategory(Notification.CATEGORY_RECOMMENDATION);
mBuilder.setVisibility(Notification.VISIBILITY_PUBLIC);
}
NotificationManager mNotificationManager = (NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);
mNotificationManager.notify(GlobalData.IMPORTANT_INFO_NOTIFICATION_ID, mBuilder.build());
}
}
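The class above gates the "important info" notification on both a minimum version code and a minimum API level, and only re-evaluates the decision when the installed version code increases. A minimal, Android-free sketch of that decision (hypothetical names; the real code reads the version from PackageManager and persists state via GlobalData) could look like this:

// Sketch of the version/API gate used above (no Android dependencies).
public class InfoNotificationGate {
    static final int VERSION_CODE_FOR_NEWS = 9999; // news currently disabled
    static final int API_LEVEL_FOR_NEWS = 14;

    /** Should the "show on next start" flag be set for this upgrade? */
    static boolean shouldShowForNewVersion(int versionCode, int lastSeenVersion, int sdkInt) {
        return versionCode > lastSeenVersion
                && versionCode >= VERSION_CODE_FOR_NEWS
                && sdkInt >= API_LEVEL_FOR_NEWS;
    }

    public static void main(String[] args) {
        // Same install, no version bump -> no notification.
        System.out.println(shouldShowForNewVersion(1592, 1592, 21)); // false
        // Version bump, but below VERSION_CODE_FOR_NEWS -> still suppressed.
        System.out.println(shouldShowForNewVersion(1600, 1592, 21)); // false
    }
}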
|
phoneProfiles/src/main/java/sk/henrichg/phoneprofiles/ImportantInfoNotification.java
|
package sk.henrichg.phoneprofiles;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.support.v4.app.NotificationCompat;
public class ImportantInfoNotification {
public static final int VERSION_CODE_FOR_NEWS = 1592;
public static final int API_LEVEL_FOR_NEWS = 21;
static public void showInfoNotification(Context context) {
PackageInfo pinfo = null;
int versionCode = 0;
try {
pinfo = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
versionCode = pinfo.versionCode;
if (versionCode > GlobalData.getShowInfoNotificationOnStartVersion(context) &&
(android.os.Build.VERSION.SDK_INT >= API_LEVEL_FOR_NEWS))
GlobalData.setShowInfoNotificationOnStart(context, true, versionCode);
else
GlobalData.setShowInfoNotificationOnStartVersion(context, versionCode);
} catch (PackageManager.NameNotFoundException e) {
//e.printStackTrace();
}
if (GlobalData.getShowInfoNotificationOnStart(context, versionCode)) {
showNotificationForUnlinkRingerNotificationVolumes(context,
context.getString(R.string.info_notification_title),
context.getString(R.string.info_notification_text));
GlobalData.setShowInfoNotificationOnStart(context, false, versionCode);
}
}
static private void showNotificationForUnlinkRingerNotificationVolumes(Context context, String title, String text) {
NotificationCompat.Builder mBuilder = new NotificationCompat.Builder(context)
.setSmallIcon(R.drawable.ic_pphelper_upgrade_notify) // notification icon
.setContentTitle(title) // title for notification
.setContentText(context.getString(R.string.app_name) + ": " + text) // message for notification
.setAutoCancel(true); // clear notification after click
Intent intent = new Intent(context, ImportantInfoActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
PendingIntent pi = PendingIntent.getActivity(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
mBuilder.setContentIntent(pi);
if (android.os.Build.VERSION.SDK_INT >= 16)
mBuilder.setPriority(Notification.PRIORITY_MAX);
if (android.os.Build.VERSION.SDK_INT >= 21)
{
mBuilder.setCategory(Notification.CATEGORY_RECOMMENDATION);
mBuilder.setVisibility(Notification.VISIBILITY_PUBLIC);
}
NotificationManager mNotificationManager = (NotificationManager)context.getSystemService(Context.NOTIFICATION_SERVICE);
mNotificationManager.notify(GlobalData.IMPORTANT_INFO_NOTIFICATION_ID, mBuilder.build());
}
}
|
Fixed showing important info notification when version code is increased
|
phoneProfiles/src/main/java/sk/henrichg/phoneprofiles/ImportantInfoNotification.java
|
Fixed showing important info notification when version code is increased
|
|
Java
|
apache-2.0
|
1c4e2dadb8268eb7bd7c453be16e84ef1ee81787
| 0
|
archit47/seaglass,khuxtable/seaglass
|
/*
* Copyright (c) 2009 Kathryn Huxtable and Kenneth Orr.
*
* This file is part of the SeaGlass Pluggable Look and Feel.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id$
*/
package com.seaglasslookandfeel.ui;
import java.awt.Graphics;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.LookAndFeel;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.basic.BasicPanelUI;
import javax.swing.plaf.synth.SynthContext;
import javax.swing.plaf.synth.SynthStyle;
import com.seaglasslookandfeel.SeaGlassContext;
import com.seaglasslookandfeel.SeaGlassLookAndFeel;
import sun.swing.plaf.synth.SynthUI;
/**
* Sea Glass Panel UI delegate.
*
* <p>Based on SynthPanelUI, but sets the panel's opaque property to false.</p>
*/
public class SeaGlassPanelUI extends BasicPanelUI implements PropertyChangeListener, SynthUI {
private SynthStyle style;
private boolean originalOpacity;
/**
* Create a UI delegate.
*
* @param c the component for the delegate.
*
* @return the UI delegate.
*/
public static ComponentUI createUI(JComponent c) {
return new SeaGlassPanelUI();
}
/**
* @see javax.swing.plaf.basic.BasicPanelUI#installUI(javax.swing.JComponent)
*/
public void installUI(JComponent c) {
JPanel p = (JPanel) c;
super.installUI(c);
installListeners(p);
}
/**
* @see javax.swing.plaf.basic.BasicPanelUI#uninstallUI(javax.swing.JComponent)
*/
public void uninstallUI(JComponent c) {
JPanel p = (JPanel) c;
uninstallListeners(p);
super.uninstallUI(c);
}
/**
* Install the listeners.
*
* @param p the panel.
*/
protected void installListeners(JPanel p) {
p.addPropertyChangeListener(this);
}
/**
* Uninstall the listeners.
*
* @param p the panel.
*/
protected void uninstallListeners(JPanel p) {
p.removePropertyChangeListener(this);
}
/**
* @see javax.swing.plaf.basic.BasicPanelUI#installDefaults(javax.swing.JPanel)
*/
protected void installDefaults(JPanel p) {
this.originalOpacity = p.isOpaque();
updateStyle(p);
}
/**
* @see javax.swing.plaf.basic.BasicPanelUI#uninstallDefaults(javax.swing.JPanel)
*/
protected void uninstallDefaults(JPanel p) {
SeaGlassContext context = getContext(p, ENABLED);
style.uninstallDefaults(context);
context.dispose();
style = null;
// Restore original opacity if not changed by the code.
LookAndFeel.installProperty(p, "opaque", originalOpacity);
}
/**
* Update the Synth style if a property changes.
*
* @param c the panel.
*/
private void updateStyle(JPanel c) {
SeaGlassContext context = getContext(c, ENABLED);
style = SeaGlassLookAndFeel.updateStyle(context, this);
context.dispose();
// Set the opacity according to whether the background has been set.
// Don't set it if the user has already set it.
LookAndFeel.installProperty(c, "opaque", !(c.getBackground() instanceof UIResource));
}
/**
* @see sun.swing.plaf.synth.SynthUI#getContext(javax.swing.JComponent)
*/
public SeaGlassContext getContext(JComponent c) {
return getContext(c, getComponentState(c));
}
/**
* Get the Synth context.
*
* @param c the panel.
* @param state the Synth state.
*
* @return the context.
*/
private SeaGlassContext getContext(JComponent c, int state) {
return SeaGlassContext.getContext(SeaGlassContext.class, c, SeaGlassLookAndFeel.getRegion(c), style, state);
}
/**
* Get the Synth state of the panel.
*
* @param c the panel.
*
* @return the state.
*/
private int getComponentState(JComponent c) {
return SeaGlassLookAndFeel.getComponentState(c);
}
/**
* @see javax.swing.plaf.ComponentUI#update(java.awt.Graphics, javax.swing.JComponent)
*/
public void update(Graphics g, JComponent c) {
SeaGlassContext context = getContext(c);
SeaGlassLookAndFeel.update(context, g);
context.getPainter().paintPanelBackground(context, g, 0, 0, c.getWidth(), c.getHeight());
paint(context, g);
context.dispose();
}
/**
* @see javax.swing.plaf.ComponentUI#paint(java.awt.Graphics, javax.swing.JComponent)
*/
public void paint(Graphics g, JComponent c) {
SeaGlassContext context = getContext(c);
paint(context, g);
context.dispose();
}
/**
* Paint the panel. Nothing to do here. The panel is painted by its
* components.
*
* @param context the Synth context.
* @param g the Graphics context.
*/
protected void paint(SeaGlassContext context, Graphics g) {
// do actual painting
}
/**
* @see sun.swing.plaf.synth.SynthUI#paintBorder(javax.swing.plaf.synth.SynthContext,
* java.awt.Graphics, int, int, int, int)
*/
public void paintBorder(SynthContext context, Graphics g, int x, int y, int w, int h) {
((SeaGlassContext) context).getPainter().paintPanelBorder(context, g, x, y, w, h);
}
/**
* @see java.beans.PropertyChangeListener#propertyChange(java.beans.PropertyChangeEvent)
*/
public void propertyChange(PropertyChangeEvent pce) {
if (SeaGlassLookAndFeel.shouldUpdateStyle(pce)) {
updateStyle((JPanel) pce.getSource());
} else if ("background".equals(pce.getPropertyName())) {
updateStyle((JPanel) pce.getSource());
}
}
}
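The delegate above uses LookAndFeel.installProperty so that the look and feel only controls the opaque property while the application has not set it explicitly. A small standalone sketch of that contract follows (class name is illustrative only; the printed values reflect the documented behavior under the default look and feel).

import javax.swing.JPanel;
import javax.swing.LookAndFeel;

// Demonstrates why the UI delegate prefers LookAndFeel.installProperty over
// setOpaque: installProperty only takes effect while the property is still
// "owned" by the look and feel, so an explicit setOpaque call by the
// application is never silently overridden when styles are (re)installed.
public class InstallPropertyDemo {
    public static void main(String[] args) {
        JPanel p = new JPanel();

        LookAndFeel.installProperty(p, "opaque", Boolean.FALSE);
        System.out.println(p.isOpaque()); // false - still owned by the L&F

        p.setOpaque(true); // the application takes ownership of the property

        LookAndFeel.installProperty(p, "opaque", Boolean.FALSE);
        System.out.println(p.isOpaque()); // true - the L&F no longer wins
    }
}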
|
seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/ui/SeaGlassPanelUI.java
|
/*
* Copyright (c) 2009 Kathryn Huxtable and Kenneth Orr.
*
* This file is part of the SeaGlass Pluggable Look and Feel.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id: SeaGlassRootPaneUI.java 862 2010-01-22 16:02:13Z kathryn@kathrynhuxtable.org $
*/
package com.seaglasslookandfeel.ui;
import java.awt.Graphics;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.basic.BasicPanelUI;
import javax.swing.plaf.synth.SynthContext;
import javax.swing.plaf.synth.SynthStyle;
import com.seaglasslookandfeel.SeaGlassContext;
import com.seaglasslookandfeel.SeaGlassLookAndFeel;
import sun.swing.plaf.synth.SynthUI;
/**
* Sea Glass Panel UI delegate.
*
* Based on SynthPanelUI, but sets the panel's opaque property to false.
*/
public class SeaGlassPanelUI extends BasicPanelUI implements PropertyChangeListener, SynthUI {
private SynthStyle style;
public static ComponentUI createUI(JComponent c) {
return new SeaGlassPanelUI();
}
public void installUI(JComponent c) {
JPanel p = (JPanel) c;
super.installUI(c);
installListeners(p);
}
public void uninstallUI(JComponent c) {
JPanel p = (JPanel) c;
uninstallListeners(p);
super.uninstallUI(c);
}
protected void installListeners(JPanel p) {
p.addPropertyChangeListener(this);
}
protected void uninstallListeners(JPanel p) {
p.removePropertyChangeListener(this);
}
protected void installDefaults(JPanel p) {
updateStyle(p);
}
protected void uninstallDefaults(JPanel p) {
SeaGlassContext context = getContext(p, ENABLED);
style.uninstallDefaults(context);
context.dispose();
style = null;
}
private void updateStyle(JPanel c) {
SeaGlassContext context = getContext(c, ENABLED);
style = SeaGlassLookAndFeel.updateStyle(context, this);
context.dispose();
}
public SeaGlassContext getContext(JComponent c) {
return getContext(c, getComponentState(c));
}
private SeaGlassContext getContext(JComponent c, int state) {
return SeaGlassContext.getContext(SeaGlassContext.class, c, SeaGlassLookAndFeel.getRegion(c), style, state);
}
private int getComponentState(JComponent c) {
return SeaGlassLookAndFeel.getComponentState(c);
}
public void update(Graphics g, JComponent c) {
SeaGlassContext context = getContext(c);
// Subvert paintRegion, which is called by SeaGlassLookAndFeel.update.
// We don't want to paint panel background if it has not been set by the
// user.
if (!(c.getBackground() instanceof UIResource)) {
SeaGlassLookAndFeel.update(context, g);
context.getPainter().paintPanelBackground(context, g, 0, 0, c.getWidth(), c.getHeight());
}
paint(context, g);
context.dispose();
}
public void paint(Graphics g, JComponent c) {
SeaGlassContext context = getContext(c);
paint(context, g);
context.dispose();
}
protected void paint(SeaGlassContext context, Graphics g) {
// do actual painting
}
public void paintBorder(SynthContext context, Graphics g, int x, int y, int w, int h) {
((SeaGlassContext) context).getPainter().paintPanelBorder(context, g, x, y, w, h);
}
public void propertyChange(PropertyChangeEvent pce) {
if (SeaGlassLookAndFeel.shouldUpdateStyle(pce)) {
updateStyle((JPanel) pce.getSource());
}
}
}
|
Changed method of handling background gradient to use LookAndFeel.installProperty rather than subverting paint.
This should fix issue 23.
|
seaglass/trunk/seaglass/src/main/java/com/seaglasslookandfeel/ui/SeaGlassPanelUI.java
|
Changed method of handling background gradient to use LookAndFeel.installProperty rather than subverting paint. This should fix issue 23.
|
|
Java
|
apache-2.0
|
7f8f1ca9a0ee8bfd5d7da563b287038abecfbf42
| 0
|
apache/tomcat,apache/tomcat,apache/tomcat,apache/tomcat,apache/tomcat
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanRegistration;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import jakarta.servlet.http.HttpUpgradeHandler;
import jakarta.servlet.http.WebConnection;
import org.apache.coyote.http11.upgrade.InternalHttpUpgradeHandler;
import org.apache.juli.logging.Log;
import org.apache.tomcat.InstanceManager;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.collections.SynchronizedStack;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.net.AbstractEndpoint;
import org.apache.tomcat.util.net.AbstractEndpoint.Handler;
import org.apache.tomcat.util.net.SocketEvent;
import org.apache.tomcat.util.net.SocketWrapperBase;
import org.apache.tomcat.util.res.StringManager;
public abstract class AbstractProtocol<S> implements ProtocolHandler,
MBeanRegistration {
/**
* The string manager for this package.
*/
private static final StringManager sm = StringManager.getManager(AbstractProtocol.class);
/**
* Counter used to generate unique JMX names for connectors using automatic
* port binding.
*/
private static final AtomicInteger nameCounter = new AtomicInteger(0);
/**
* Unique ID for this connector. Only used if the connector is configured
* to use a random port as the port will change if stop(), start() is
* called.
*/
private int nameIndex = 0;
/**
* Endpoint that provides low-level network I/O - must be matched to the
* ProtocolHandler implementation (a ProtocolHandler using NIO requires a NIO
* Endpoint, etc.).
*/
private final AbstractEndpoint<S,?> endpoint;
private Handler<S> handler;
private final Set<Processor> waitingProcessors =
Collections.newSetFromMap(new ConcurrentHashMap<>());
/**
* Controller for the timeout scheduling.
*/
private ScheduledFuture<?> timeoutFuture = null;
private ScheduledFuture<?> monitorFuture;
public AbstractProtocol(AbstractEndpoint<S,?> endpoint) {
this.endpoint = endpoint;
setConnectionLinger(Constants.DEFAULT_CONNECTION_LINGER);
setTcpNoDelay(Constants.DEFAULT_TCP_NO_DELAY);
}
// ----------------------------------------------- Generic property handling
/**
* Generic property setter used by the digester. Other code should not need
* to use this. The digester will only use this method if it can't find a
* more specific setter. That means the property belongs to the Endpoint,
* the ServerSocketFactory or some other lower level component. This method
* ensures that it is visible to both.
*
* @param name The name of the property to set
* @param value The value, in string form, to set for the property
*
* @return <code>true</code> if the property was set successfully, otherwise
* <code>false</code>
*/
public boolean setProperty(String name, String value) {
return endpoint.setProperty(name, value);
}
/**
* Generic property getter used by the digester. Other code should not need
* to use this.
*
* @param name The name of the property to get
*
* @return The value of the property converted to a string
*/
public String getProperty(String name) {
return endpoint.getProperty(name);
}
// ------------------------------- Properties managed by the ProtocolHandler
/**
* Name of MBean for the Global Request Processor.
*/
protected ObjectName rgOname = null;
public ObjectName getGlobalRequestProcessorMBeanName() {
return rgOname;
}
/**
* The adapter provides the link between the ProtocolHandler and the
* connector.
*/
protected Adapter adapter;
@Override
public void setAdapter(Adapter adapter) { this.adapter = adapter; }
@Override
public Adapter getAdapter() { return adapter; }
/**
* The maximum number of idle processors that will be retained in the cache
* and re-used with a subsequent request. The default is 200. A value of -1
* means unlimited. In the unlimited case, the theoretical maximum number of
* cached Processor objects is {@link #getMaxConnections()} although it will
* usually be closer to {@link #getMaxThreads()}.
*/
protected int processorCache = 200;
public int getProcessorCache() { return this.processorCache; }
public void setProcessorCache(int processorCache) {
this.processorCache = processorCache;
}
private String clientCertProvider = null;
/**
* When client certificate information is presented in a form other than
* instances of {@link java.security.cert.X509Certificate} it needs to be
* converted before it can be used and this property controls which JSSE
* provider is used to perform the conversion. For example it is used with
* the AJP connectors and with the
* {@link org.apache.catalina.valves.SSLValve}. If not specified, the
* default provider will be used.
*
* @return The name of the JSSE provider to use
*/
public String getClientCertProvider() { return clientCertProvider; }
public void setClientCertProvider(String s) { this.clientCertProvider = s; }
private int maxHeaderCount = 100;
public int getMaxHeaderCount() {
return maxHeaderCount;
}
public void setMaxHeaderCount(int maxHeaderCount) {
this.maxHeaderCount = maxHeaderCount;
}
@Override
public boolean isSendfileSupported() {
return endpoint.getUseSendfile();
}
@Override
public String getId() {
return endpoint.getId();
}
// ---------------------- Properties that are passed through to the EndPoint
@Override
public Executor getExecutor() { return endpoint.getExecutor(); }
@Override
public void setExecutor(Executor executor) {
endpoint.setExecutor(executor);
}
@Override
public ScheduledExecutorService getUtilityExecutor() { return endpoint.getUtilityExecutor(); }
@Override
public void setUtilityExecutor(ScheduledExecutorService utilityExecutor) {
endpoint.setUtilityExecutor(utilityExecutor);
}
public int getMaxThreads() { return endpoint.getMaxThreads(); }
public void setMaxThreads(int maxThreads) {
endpoint.setMaxThreads(maxThreads);
}
public int getMaxConnections() { return endpoint.getMaxConnections(); }
public void setMaxConnections(int maxConnections) {
endpoint.setMaxConnections(maxConnections);
}
public int getMinSpareThreads() { return endpoint.getMinSpareThreads(); }
public void setMinSpareThreads(int minSpareThreads) {
endpoint.setMinSpareThreads(minSpareThreads);
}
public int getThreadPriority() { return endpoint.getThreadPriority(); }
public void setThreadPriority(int threadPriority) {
endpoint.setThreadPriority(threadPriority);
}
public int getAcceptCount() { return endpoint.getAcceptCount(); }
public void setAcceptCount(int acceptCount) { endpoint.setAcceptCount(acceptCount); }
public boolean getTcpNoDelay() { return endpoint.getTcpNoDelay(); }
public void setTcpNoDelay(boolean tcpNoDelay) {
endpoint.setTcpNoDelay(tcpNoDelay);
}
public int getConnectionLinger() { return endpoint.getConnectionLinger(); }
public void setConnectionLinger(int connectionLinger) {
endpoint.setConnectionLinger(connectionLinger);
}
/**
* The time Tomcat will wait for a subsequent request before closing the
* connection. The default is {@link #getConnectionTimeout()}.
*
* @return The timeout in milliseconds
*/
public int getKeepAliveTimeout() { return endpoint.getKeepAliveTimeout(); }
public void setKeepAliveTimeout(int keepAliveTimeout) {
endpoint.setKeepAliveTimeout(keepAliveTimeout);
}
public InetAddress getAddress() { return endpoint.getAddress(); }
public void setAddress(InetAddress ia) {
endpoint.setAddress(ia);
}
public int getPort() { return endpoint.getPort(); }
public void setPort(int port) {
endpoint.setPort(port);
}
public int getPortOffset() { return endpoint.getPortOffset(); }
public void setPortOffset(int portOffset) {
endpoint.setPortOffset(portOffset);
}
public int getPortWithOffset() { return endpoint.getPortWithOffset(); }
public int getLocalPort() { return endpoint.getLocalPort(); }
/*
* When Tomcat expects data from the client, this is the time Tomcat will
* wait for that data to arrive before closing the connection.
*/
public int getConnectionTimeout() {
return endpoint.getConnectionTimeout();
}
public void setConnectionTimeout(int timeout) {
endpoint.setConnectionTimeout(timeout);
}
public long getConnectionCount() {
return endpoint.getConnectionCount();
}
public void setAcceptorThreadPriority(int threadPriority) {
endpoint.setAcceptorThreadPriority(threadPriority);
}
public int getAcceptorThreadPriority() {
return endpoint.getAcceptorThreadPriority();
}
// ---------------------------------------------------------- Public methods
public synchronized int getNameIndex() {
if (nameIndex == 0) {
nameIndex = nameCounter.incrementAndGet();
}
return nameIndex;
}
/**
* The name will be prefix-address-port if address is non-null and
* prefix-port if the address is null.
*
* @return A name for this protocol instance that is appropriately quoted
* for use in an ObjectName.
*/
public String getName() {
return ObjectName.quote(getNameInternal());
}
private String getNameInternal() {
StringBuilder name = new StringBuilder(getNamePrefix());
name.append('-');
String id = getId();
if (id != null) {
name.append(id);
} else {
if (getAddress() != null) {
name.append(getAddress().getHostAddress());
name.append('-');
}
int port = getPortWithOffset();
if (port == 0) {
// Auto binding is in use. Check if port is known
name.append("auto-");
name.append(getNameIndex());
port = getLocalPort();
if (port != -1) {
name.append('-');
name.append(port);
}
} else {
name.append(port);
}
}
return name.toString();
}
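// Illustration, not part of the original source: for a plain HTTP NIO
// connector the prefix is typically "http-nio", so a connector on port 8080
// yields "http-nio-8080", an address-bound connector something like
// "http-nio-127.0.0.1-8080", and an auto-bound connector (port="0")
// "http-nio-auto-1" plus "-<localPort>" once the local port is known.
// getName() then quotes the result, e.g. ObjectName.quote("http-nio-8080")
// returns "\"http-nio-8080\"".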
public void addWaitingProcessor(Processor processor) {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractProtocol.waitingProcessor.add", processor));
}
waitingProcessors.add(processor);
}
public void removeWaitingProcessor(Processor processor) {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractProtocol.waitingProcessor.remove", processor));
}
waitingProcessors.remove(processor);
}
/*
* Primarily for debugging and testing. Could be exposed via JMX if
* considered useful.
*/
public int getWaitingProcessorCount() {
return waitingProcessors.size();
}
// ----------------------------------------------- Accessors for sub-classes
protected AbstractEndpoint<S,?> getEndpoint() {
return endpoint;
}
protected Handler<S> getHandler() {
return handler;
}
protected void setHandler(Handler<S> handler) {
this.handler = handler;
}
// -------------------------------------------------------- Abstract methods
/**
* Concrete implementations need to provide access to their logger to be
* used by the abstract classes.
* @return the logger
*/
protected abstract Log getLog();
/**
* Obtain the prefix to be used when constructing a name for this protocol
* handler. The name will be prefix-address-port.
* @return the prefix
*/
protected abstract String getNamePrefix();
/**
* Obtain the name of the protocol, (Http, Ajp, etc.). Used with JMX.
* @return the protocol name
*/
protected abstract String getProtocolName();
/**
* Find a suitable handler for the protocol negotiated
* at the network layer.
* @param name The name of the requested negotiated protocol.
* @return The instance where {@link UpgradeProtocol#getAlpnName()} matches
* the requested protocol
*/
protected abstract UpgradeProtocol getNegotiatedProtocol(String name);
/**
* Find a suitable handler for the protocol upgraded name specified. This
* is used for direct connection protocol selection.
* @param name The name of the requested negotiated protocol.
* @return The instance where {@link UpgradeProtocol#getAlpnName()} matches
* the requested protocol
*/
protected abstract UpgradeProtocol getUpgradeProtocol(String name);
/**
* Create and configure a new Processor instance for the current protocol
* implementation.
*
* @return A fully configured Processor instance that is ready to use
*/
protected abstract Processor createProcessor();
protected abstract Processor createUpgradeProcessor(
SocketWrapperBase<?> socket,
UpgradeToken upgradeToken);
// ----------------------------------------------------- JMX related methods
protected String domain;
protected ObjectName oname;
protected MBeanServer mserver;
public ObjectName getObjectName() {
return oname;
}
public String getDomain() {
return domain;
}
@Override
public ObjectName preRegister(MBeanServer server, ObjectName name)
throws Exception {
oname = name;
mserver = server;
domain = name.getDomain();
return name;
}
@Override
public void postRegister(Boolean registrationDone) {
// NOOP
}
@Override
public void preDeregister() throws Exception {
// NOOP
}
@Override
public void postDeregister() {
// NOOP
}
private ObjectName createObjectName() throws MalformedObjectNameException {
// Use the same domain as the connector
domain = getAdapter().getDomain();
if (domain == null) {
return null;
}
StringBuilder name = new StringBuilder(getDomain());
name.append(":type=ProtocolHandler,port=");
int port = getPortWithOffset();
if (port > 0) {
name.append(port);
} else {
name.append("auto-");
name.append(getNameIndex());
}
InetAddress address = getAddress();
if (address != null) {
name.append(",address=");
name.append(ObjectName.quote(address.getHostAddress()));
}
return new ObjectName(name.toString());
}
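// Illustration, not part of the original source: under the usual "Catalina"
// domain this produces ObjectNames such as
//   Catalina:type=ProtocolHandler,port=8080
// or, when an address is configured,
//   Catalina:type=ProtocolHandler,port=8443,address="127.0.0.1"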
// ------------------------------------------------------- Lifecycle methods
/*
* NOTE: There is no maintenance of state or checking for valid transitions
* within this class. It is expected that the connector will maintain state
* and prevent invalid state transitions.
*/
@Override
public void init() throws Exception {
if (getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.init", getName()));
logPortOffset();
}
if (oname == null) {
// Component not pre-registered so register it
oname = createObjectName();
if (oname != null) {
Registry.getRegistry(null, null).registerComponent(this, oname, null);
}
}
if (this.domain != null) {
ObjectName rgOname = new ObjectName(domain + ":type=GlobalRequestProcessor,name=" + getName());
this.rgOname = rgOname;
Registry.getRegistry(null, null).registerComponent(
getHandler().getGlobal(), rgOname, null);
}
String endpointName = getName();
endpoint.setName(endpointName.substring(1, endpointName.length()-1));
endpoint.setDomain(domain);
endpoint.init();
}
@Override
public void start() throws Exception {
if (getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.start", getName()));
logPortOffset();
}
endpoint.start();
monitorFuture = getUtilityExecutor().scheduleWithFixedDelay(
() -> {
if (!isPaused()) {
startAsyncTimeout();
}
}, 0, 60, TimeUnit.SECONDS);
}
/**
* Note: The name of this method originated with the Servlet 3.0
* asynchronous processing but evolved over time to represent a timeout that
* is triggered independently of the socket read/write timeouts.
*/
protected void startAsyncTimeout() {
if (timeoutFuture == null || timeoutFuture.isDone()) {
if (timeoutFuture != null && timeoutFuture.isDone()) {
// There was an error executing the scheduled task, get it and log it
try {
timeoutFuture.get();
} catch (InterruptedException | ExecutionException e) {
getLog().error(sm.getString("abstractProtocolHandler.asyncTimeoutError"), e);
}
}
timeoutFuture = getUtilityExecutor().scheduleAtFixedRate(
() -> {
long now = System.currentTimeMillis();
for (Processor processor : waitingProcessors) {
processor.timeoutAsync(now);
}
}, 1, 1, TimeUnit.SECONDS);
}
}
protected void stopAsyncTimeout() {
if (timeoutFuture != null) {
timeoutFuture.cancel(false);
timeoutFuture = null;
}
}
@Override
public void pause() throws Exception {
if (getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.pause", getName()));
}
stopAsyncTimeout();
endpoint.pause();
}
public boolean isPaused() {
return endpoint.isPaused();
}
@Override
public void resume() throws Exception {
if(getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.resume", getName()));
}
endpoint.resume();
startAsyncTimeout();
}
@Override
public void stop() throws Exception {
if(getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.stop", getName()));
logPortOffset();
}
if (monitorFuture != null) {
monitorFuture.cancel(true);
monitorFuture = null;
}
stopAsyncTimeout();
// Timeout any waiting processor
for (Processor processor : waitingProcessors) {
processor.timeoutAsync(-1);
}
endpoint.stop();
}
@Override
public void destroy() throws Exception {
if(getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.destroy", getName()));
logPortOffset();
}
try {
endpoint.destroy();
} finally {
if (oname != null) {
if (mserver == null) {
Registry.getRegistry(null, null).unregisterComponent(oname);
} else {
// Possibly registered with a different MBeanServer
try {
mserver.unregisterMBean(oname);
} catch (MBeanRegistrationException | InstanceNotFoundException e) {
getLog().info(sm.getString("abstractProtocol.mbeanDeregistrationFailed",
oname, mserver));
}
}
}
ObjectName rgOname = getGlobalRequestProcessorMBeanName();
if (rgOname != null) {
Registry.getRegistry(null, null).unregisterComponent(rgOname);
}
}
}
@Override
public void closeServerSocketGraceful() {
endpoint.closeServerSocketGraceful();
}
@Override
public long awaitConnectionsClose(long waitMillis) {
getLog().info(sm.getString("abstractProtocol.closeConnectionsAwait",
Long.valueOf(waitMillis), getName()));
return endpoint.awaitConnectionsClose(waitMillis);
}
private void logPortOffset() {
if (getPort() != getPortWithOffset()) {
getLog().info(sm.getString("abstractProtocolHandler.portOffset", getName(),
String.valueOf(getPort()), String.valueOf(getPortOffset())));
}
}
// ------------------------------------------- Connection handler base class
protected static class ConnectionHandler<S> implements AbstractEndpoint.Handler<S> {
private final AbstractProtocol<S> proto;
private final RequestGroupInfo global = new RequestGroupInfo();
private final AtomicLong registerCount = new AtomicLong(0);
private final RecycledProcessors recycledProcessors = new RecycledProcessors(this);
public ConnectionHandler(AbstractProtocol<S> proto) {
this.proto = proto;
}
protected AbstractProtocol<S> getProtocol() {
return proto;
}
protected Log getLog() {
return getProtocol().getLog();
}
@Override
public Object getGlobal() {
return global;
}
@Override
public void recycle() {
recycledProcessors.clear();
}
@Override
public SocketState process(SocketWrapperBase<S> wrapper, SocketEvent status) {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.process",
wrapper.getSocket(), status));
}
if (wrapper == null) {
// Nothing to do. Socket has been closed.
return SocketState.CLOSED;
}
S socket = wrapper.getSocket();
Processor processor = (Processor) wrapper.getCurrentProcessor();
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.connectionsGet",
processor, socket));
}
// Timeouts are calculated on a dedicated thread and then
// dispatched. Because of delays in the dispatch process, the
// timeout may no longer be required. Check here and avoid
// unnecessary processing.
if (SocketEvent.TIMEOUT == status &&
(processor == null ||
!processor.isAsync() && !processor.isUpgrade() ||
processor.isAsync() && !processor.checkAsyncTimeoutGeneration())) {
// This is effectively a NO-OP
return SocketState.OPEN;
}
if (processor != null) {
// Make sure an async timeout doesn't fire
getProtocol().removeWaitingProcessor(processor);
} else if (status == SocketEvent.DISCONNECT || status == SocketEvent.ERROR) {
// Nothing to do. Endpoint requested a close and there is no
// longer a processor associated with this socket.
return SocketState.CLOSED;
}
ContainerThreadMarker.set();
try {
if (processor == null) {
String negotiatedProtocol = wrapper.getNegotiatedProtocol();
// OpenSSL typically returns null whereas JSSE typically
// returns "" when no protocol is negotiated
if (negotiatedProtocol != null && negotiatedProtocol.length() > 0) {
UpgradeProtocol upgradeProtocol = getProtocol().getNegotiatedProtocol(negotiatedProtocol);
if (upgradeProtocol != null) {
processor = upgradeProtocol.getProcessor(wrapper, getProtocol().getAdapter());
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.processorCreate", processor));
}
} else if (negotiatedProtocol.equals("http/1.1")) {
// Explicitly negotiated the default protocol.
// Obtain a processor below.
} else {
// TODO:
// OpenSSL 1.0.2's ALPN callback doesn't support
// failing the handshake with an error if no
// protocol can be negotiated. Therefore, we need to
// fail the connection here. Once this is fixed,
// replace the code below with the commented out
// block.
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.negotiatedProcessor.fail",
negotiatedProtocol));
}
return SocketState.CLOSED;
/*
* To replace the code above once OpenSSL 1.1.0 is
* used.
// Failed to create processor. This is a bug.
throw new IllegalStateException(sm.getString(
"abstractConnectionHandler.negotiatedProcessor.fail",
negotiatedProtocol));
*/
}
}
}
if (processor == null) {
processor = recycledProcessors.pop();
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.processorPop", processor));
}
}
if (processor == null) {
processor = getProtocol().createProcessor();
register(processor);
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.processorCreate", processor));
}
}
processor.setSslSupport(wrapper.getSslSupport());
// Associate the processor with the connection
wrapper.setCurrentProcessor(processor);
SocketState state = SocketState.CLOSED;
do {
state = processor.process(wrapper, status);
if (state == SocketState.UPGRADING) {
// Get the HTTP upgrade handler
UpgradeToken upgradeToken = processor.getUpgradeToken();
// Restore leftover input to the wrapper so the upgrade
// processor can process it.
ByteBuffer leftOverInput = processor.getLeftoverInput();
wrapper.unRead(leftOverInput);
if (upgradeToken == null) {
// Assume direct HTTP/2 connection
UpgradeProtocol upgradeProtocol = getProtocol().getUpgradeProtocol("h2c");
if (upgradeProtocol != null) {
// Release the Http11 processor to be re-used
release(processor);
// Create the upgrade processor
processor = upgradeProtocol.getProcessor(wrapper, getProtocol().getAdapter());
// Associate the processor with the connection
wrapper.setCurrentProcessor(processor);
} else {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString(
"abstractConnectionHandler.negotiatedProcessor.fail",
"h2c"));
}
// Exit loop and trigger appropriate clean-up
state = SocketState.CLOSED;
}
} else {
HttpUpgradeHandler httpUpgradeHandler = upgradeToken.getHttpUpgradeHandler();
// Release the Http11 processor to be re-used
release(processor);
// Create the upgrade processor
processor = getProtocol().createUpgradeProcessor(wrapper, upgradeToken);
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.upgradeCreate",
processor, wrapper));
}
// Associate the processor with the connection
wrapper.setCurrentProcessor(processor);
// Initialise the upgrade handler (which may trigger
// some IO using the new protocol which is why the lines
// above are necessary)
// This cast should be safe. If it fails the error
// handling for the surrounding try/catch will deal with
// it.
if (upgradeToken.getInstanceManager() == null) {
httpUpgradeHandler.init((WebConnection) processor);
} else {
ClassLoader oldCL = upgradeToken.getContextBind().bind(false, null);
try {
httpUpgradeHandler.init((WebConnection) processor);
} finally {
upgradeToken.getContextBind().unbind(false, oldCL);
}
}
if (httpUpgradeHandler instanceof InternalHttpUpgradeHandler) {
if (((InternalHttpUpgradeHandler) httpUpgradeHandler).hasAsyncIO()) {
// The handler will initiate all further I/O
state = SocketState.UPGRADED;
}
}
}
}
} while ( state == SocketState.UPGRADING);
if (state == SocketState.LONG) {
// In the middle of processing a request/response. Keep the
// socket associated with the processor. Exact requirements
// depend on type of long poll
longPoll(wrapper, processor);
if (processor.isAsync()) {
getProtocol().addWaitingProcessor(processor);
}
} else if (state == SocketState.OPEN) {
// In keep-alive but between requests. OK to recycle
// processor. Continue to poll for the next request.
wrapper.setCurrentProcessor(null);
release(processor);
wrapper.registerReadInterest();
} else if (state == SocketState.SENDFILE) {
// Sendfile in progress. If it fails, the socket will be
// closed. If it works, the socket will either be added to the
// poller (or equivalent) to await more data, or be processed
// if there are any pipe-lined requests remaining.
} else if (state == SocketState.UPGRADED) {
// Don't add sockets back to the poller if this was a
// non-blocking write otherwise the poller may trigger
// multiple read events which may lead to thread starvation
// in the connector. The write() method will add this socket
// to the poller if necessary.
if (status != SocketEvent.OPEN_WRITE) {
longPoll(wrapper, processor);
getProtocol().addWaitingProcessor(processor);
}
} else if (state == SocketState.SUSPENDED) {
// Don't add sockets back to the poller.
// The resumeProcessing() method will add this socket
// to the poller.
} else {
// Connection closed. OK to recycle the processor.
// Processors handling upgrades require additional clean-up
// before release.
wrapper.setCurrentProcessor(null);
if (processor.isUpgrade()) {
UpgradeToken upgradeToken = processor.getUpgradeToken();
HttpUpgradeHandler httpUpgradeHandler = upgradeToken.getHttpUpgradeHandler();
InstanceManager instanceManager = upgradeToken.getInstanceManager();
if (instanceManager == null) {
httpUpgradeHandler.destroy();
} else {
ClassLoader oldCL = upgradeToken.getContextBind().bind(false, null);
try {
httpUpgradeHandler.destroy();
} finally {
try {
instanceManager.destroyInstance(httpUpgradeHandler);
} catch (Throwable e) {
ExceptionUtils.handleThrowable(e);
getLog().error(sm.getString("abstractConnectionHandler.error"), e);
}
upgradeToken.getContextBind().unbind(false, oldCL);
}
}
}
release(processor);
}
return state;
} catch(java.net.SocketException e) {
// SocketExceptions are normal
getLog().debug(sm.getString(
"abstractConnectionHandler.socketexception.debug"), e);
} catch (java.io.IOException e) {
// IOExceptions are normal
getLog().debug(sm.getString(
"abstractConnectionHandler.ioexception.debug"), e);
} catch (ProtocolException e) {
// Protocol exceptions normally mean the client sent invalid or
// incomplete data.
getLog().debug(sm.getString(
"abstractConnectionHandler.protocolexception.debug"), e);
}
// Future developers: if you discover any other
// rare-but-nonfatal exceptions, catch them here, and log as
// above.
catch (OutOfMemoryError oome) {
// Try and handle this here to give Tomcat a chance to close the
// connection and prevent clients waiting until they time out.
// Worst case, it isn't recoverable and the attempt at logging
// will trigger another OOME.
getLog().error(sm.getString("abstractConnectionHandler.oome"), oome);
} catch (Throwable e) {
ExceptionUtils.handleThrowable(e);
// any other exception or error is odd. Here we log it
// with "ERROR" level, so it will show up even on
// less-than-verbose logs.
getLog().error(sm.getString("abstractConnectionHandler.error"), e);
} finally {
ContainerThreadMarker.clear();
}
// Make sure socket/processor is removed from the list of current
// connections
wrapper.setCurrentProcessor(null);
release(processor);
return SocketState.CLOSED;
}
protected void longPoll(SocketWrapperBase<?> socket, Processor processor) {
if (!processor.isAsync()) {
// This is currently only used with HTTP
// Either:
// - this is an upgraded connection
// - the request line/headers have not been completely
// read
socket.registerReadInterest();
}
}
/**
* Expected to be used by the handler once the processor is no longer
* required.
*
* @param processor Processor being released (that was associated with
* the socket)
*/
private void release(Processor processor) {
if (processor != null) {
processor.recycle();
if (processor.isUpgrade()) {
// While UpgradeProcessor instances should not normally be
// present in waitingProcessors there are various scenarios
// where this can happen. E.g.:
// - when AsyncIO is used
// - WebSocket I/O error on non-container thread
// Err on the side of caution and always try and remove any
// UpgradeProcessor instances from waitingProcessors
getProtocol().removeWaitingProcessor(processor);
} else {
// After recycling, only instances of UpgradeProcessorBase
// will return true for isUpgrade().
// Instances of UpgradeProcessorBase should not be added to
// recycledProcessors since that pool is only for AJP or
// HTTP processors
recycledProcessors.push(processor);
if (getLog().isDebugEnabled()) {
getLog().debug("Pushed Processor [" + processor + "]");
}
}
}
}
/**
* Expected to be used by the Endpoint to release resources on socket
* close, errors etc.
*/
@Override
public void release(SocketWrapperBase<S> socketWrapper) {
Processor processor = (Processor) socketWrapper.getCurrentProcessor();
socketWrapper.setCurrentProcessor(null);
release(processor);
}
protected void register(Processor processor) {
if (getProtocol().getDomain() != null) {
synchronized (this) {
try {
long count = registerCount.incrementAndGet();
RequestInfo rp =
processor.getRequest().getRequestProcessor();
rp.setGlobalProcessor(global);
ObjectName rpName = new ObjectName(
getProtocol().getDomain() +
":type=RequestProcessor,worker="
+ getProtocol().getName() +
",name=" + getProtocol().getProtocolName() +
"Request" + count);
if (getLog().isDebugEnabled()) {
getLog().debug("Register [" + processor + "] as [" + rpName + "]");
}
Registry.getRegistry(null, null).registerComponent(rp,
rpName, null);
rp.setRpName(rpName);
} catch (Exception e) {
getLog().warn(sm.getString("abstractProtocol.processorRegisterError"), e);
}
}
}
}
protected void unregister(Processor processor) {
if (getProtocol().getDomain() != null) {
synchronized (this) {
try {
Request r = processor.getRequest();
if (r == null) {
// Probably an UpgradeProcessor
return;
}
RequestInfo rp = r.getRequestProcessor();
rp.setGlobalProcessor(null);
ObjectName rpName = rp.getRpName();
if (getLog().isDebugEnabled()) {
getLog().debug("Unregister [" + rpName + "]");
}
Registry.getRegistry(null, null).unregisterComponent(
rpName);
rp.setRpName(null);
} catch (Exception e) {
getLog().warn(sm.getString("abstractProtocol.processorUnregisterError"), e);
}
}
}
}
@Override
public final void pause() {
/*
* Inform all the processors associated with current connections
* that the endpoint is being paused. Most won't care. Those
* processing multiplexed streams may wish to take action. For
* example, HTTP/2 may wish to stop accepting new streams.
*
* Note that even if the endpoint is resumed, there is (currently)
* no API to inform the Processors of this.
*/
for (SocketWrapperBase<S> wrapper : proto.getEndpoint().getConnections()) {
Processor processor = (Processor) wrapper.getCurrentProcessor();
if (processor != null) {
processor.pause();
}
}
}
}
protected static class RecycledProcessors extends SynchronizedStack<Processor> {
private final transient ConnectionHandler<?> handler;
protected final AtomicInteger size = new AtomicInteger(0);
public RecycledProcessors(ConnectionHandler<?> handler) {
this.handler = handler;
}
@SuppressWarnings("sync-override") // Size may exceed cache size a bit
@Override
public boolean push(Processor processor) {
int cacheSize = handler.getProtocol().getProcessorCache();
boolean offer = cacheSize == -1 ? true : size.get() < cacheSize;
// Avoid over-growing the cache, or adding to it after we have stopped
boolean result = false;
if (offer) {
result = super.push(processor);
if (result) {
size.incrementAndGet();
}
}
if (!result) {
handler.unregister(processor);
}
return result;
}
@SuppressWarnings("sync-override") // OK if size is too big briefly
@Override
public Processor pop() {
Processor result = super.pop();
if (result != null) {
size.decrementAndGet();
}
return result;
}
@Override
public synchronized void clear() {
Processor next = pop();
while (next != null) {
handler.unregister(next);
next = pop();
}
super.clear();
size.set(0);
}
}
}
|
java/org/apache/coyote/AbstractProtocol.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote;
import java.net.InetAddress;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanRegistration;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import jakarta.servlet.http.HttpUpgradeHandler;
import jakarta.servlet.http.WebConnection;
import org.apache.coyote.http11.upgrade.InternalHttpUpgradeHandler;
import org.apache.juli.logging.Log;
import org.apache.tomcat.InstanceManager;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.collections.SynchronizedStack;
import org.apache.tomcat.util.modeler.Registry;
import org.apache.tomcat.util.net.AbstractEndpoint;
import org.apache.tomcat.util.net.AbstractEndpoint.Handler;
import org.apache.tomcat.util.net.SocketEvent;
import org.apache.tomcat.util.net.SocketWrapperBase;
import org.apache.tomcat.util.res.StringManager;
public abstract class AbstractProtocol<S> implements ProtocolHandler,
MBeanRegistration {
/**
* The string manager for this package.
*/
private static final StringManager sm = StringManager.getManager(AbstractProtocol.class);
/**
* Counter used to generate unique JMX names for connectors using automatic
* port binding.
*/
private static final AtomicInteger nameCounter = new AtomicInteger(0);
/**
* Unique ID for this connector. Only used if the connector is configured
* to use a random port, as the port will change whenever stop() and start()
* are called.
*/
private int nameIndex = 0;
/**
* Endpoint that provides low-level network I/O - must be matched to the
* ProtocolHandler implementation (a ProtocolHandler using NIO requires a NIO
* Endpoint, etc.).
*/
private final AbstractEndpoint<S,?> endpoint;
private Handler<S> handler;
private final Set<Processor> waitingProcessors =
Collections.newSetFromMap(new ConcurrentHashMap<>());
/**
* Controller for the timeout scheduling.
*/
private ScheduledFuture<?> timeoutFuture = null;
private ScheduledFuture<?> monitorFuture;
public AbstractProtocol(AbstractEndpoint<S,?> endpoint) {
this.endpoint = endpoint;
setConnectionLinger(Constants.DEFAULT_CONNECTION_LINGER);
setTcpNoDelay(Constants.DEFAULT_TCP_NO_DELAY);
}
// ----------------------------------------------- Generic property handling
/**
* Generic property setter used by the digester. Other code should not need
* to use this. The digester will only use this method if it can't find a
* more specific setter. That means the property belongs to the Endpoint,
* the ServerSocketFactory or some other lower level component. This method
* ensures that the property is visible to those lower level components.
*
* @param name The name of the property to set
* @param value The value, in string form, to set for the property
*
* @return <code>true</code> if the property was set successfully, otherwise
* <code>false</code>
*/
public boolean setProperty(String name, String value) {
return endpoint.setProperty(name, value);
}
/**
* Generic property getter used by the digester. Other code should not need
* to use this.
*
* @param name The name of the property to get
*
* @return The value of the property converted to a string
*/
public String getProperty(String name) {
return endpoint.getProperty(name);
}
// ------------------------------- Properties managed by the ProtocolHandler
/**
* Name of MBean for the Global Request Processor.
*/
protected ObjectName rgOname = null;
public ObjectName getGlobalRequestProcessorMBeanName() {
return rgOname;
}
/**
* The adapter provides the link between the ProtocolHandler and the
* connector.
*/
protected Adapter adapter;
@Override
public void setAdapter(Adapter adapter) { this.adapter = adapter; }
@Override
public Adapter getAdapter() { return adapter; }
/**
* The maximum number of idle processors that will be retained in the cache
* and re-used with a subsequent request. The default is 200. A value of -1
* means unlimited. In the unlimited case, the theoretical maximum number of
* cached Processor objects is {@link #getMaxConnections()} although it will
* usually be closer to {@link #getMaxThreads()}.
*/
protected int processorCache = 200;
public int getProcessorCache() { return this.processorCache; }
public void setProcessorCache(int processorCache) {
this.processorCache = processorCache;
}
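// Illustrative note (not part of the original source): the limit above is
// enforced later in this file by RecycledProcessors.push(), which only offers
// a recycled processor back to the pool while the pool is below the configured
// size, roughly:
//
//   int cacheSize = handler.getProtocol().getProcessorCache();
//   boolean offer = cacheSize == -1 ? true : size.get() < cacheSize;
//
// so a value of -1 disables the cap entirely, while any other value bounds the
// number of idle Processor instances retained for re-use.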
private String clientCertProvider = null;
/**
* When client certificate information is presented in a form other than
* instances of {@link java.security.cert.X509Certificate} it needs to be
* converted before it can be used and this property controls which JSSE
* provider is used to perform the conversion. For example it is used with
* the AJP connectors and with the
* {@link org.apache.catalina.valves.SSLValve}. If not specified, the
* default provider will be used.
*
* @return The name of the JSSE provider to use
*/
public String getClientCertProvider() { return clientCertProvider; }
public void setClientCertProvider(String s) { this.clientCertProvider = s; }
private int maxHeaderCount = 100;
public int getMaxHeaderCount() {
return maxHeaderCount;
}
public void setMaxHeaderCount(int maxHeaderCount) {
this.maxHeaderCount = maxHeaderCount;
}
@Override
public boolean isSendfileSupported() {
return endpoint.getUseSendfile();
}
@Override
public String getId() {
return endpoint.getId();
}
// ---------------------- Properties that are passed through to the EndPoint
@Override
public Executor getExecutor() { return endpoint.getExecutor(); }
@Override
public void setExecutor(Executor executor) {
endpoint.setExecutor(executor);
}
@Override
public ScheduledExecutorService getUtilityExecutor() { return endpoint.getUtilityExecutor(); }
@Override
public void setUtilityExecutor(ScheduledExecutorService utilityExecutor) {
endpoint.setUtilityExecutor(utilityExecutor);
}
public int getMaxThreads() { return endpoint.getMaxThreads(); }
public void setMaxThreads(int maxThreads) {
endpoint.setMaxThreads(maxThreads);
}
public int getMaxConnections() { return endpoint.getMaxConnections(); }
public void setMaxConnections(int maxConnections) {
endpoint.setMaxConnections(maxConnections);
}
public int getMinSpareThreads() { return endpoint.getMinSpareThreads(); }
public void setMinSpareThreads(int minSpareThreads) {
endpoint.setMinSpareThreads(minSpareThreads);
}
public int getThreadPriority() { return endpoint.getThreadPriority(); }
public void setThreadPriority(int threadPriority) {
endpoint.setThreadPriority(threadPriority);
}
public int getAcceptCount() { return endpoint.getAcceptCount(); }
public void setAcceptCount(int acceptCount) { endpoint.setAcceptCount(acceptCount); }
public boolean getTcpNoDelay() { return endpoint.getTcpNoDelay(); }
public void setTcpNoDelay(boolean tcpNoDelay) {
endpoint.setTcpNoDelay(tcpNoDelay);
}
public int getConnectionLinger() { return endpoint.getConnectionLinger(); }
public void setConnectionLinger(int connectionLinger) {
endpoint.setConnectionLinger(connectionLinger);
}
/**
* The time Tomcat will wait for a subsequent request before closing the
* connection. The default is {@link #getConnectionTimeout()}.
*
* @return The timeout in milliseconds
*/
public int getKeepAliveTimeout() { return endpoint.getKeepAliveTimeout(); }
public void setKeepAliveTimeout(int keepAliveTimeout) {
endpoint.setKeepAliveTimeout(keepAliveTimeout);
}
public InetAddress getAddress() { return endpoint.getAddress(); }
public void setAddress(InetAddress ia) {
endpoint.setAddress(ia);
}
public int getPort() { return endpoint.getPort(); }
public void setPort(int port) {
endpoint.setPort(port);
}
public int getPortOffset() { return endpoint.getPortOffset(); }
public void setPortOffset(int portOffset) {
endpoint.setPortOffset(portOffset);
}
public int getPortWithOffset() { return endpoint.getPortWithOffset(); }
public int getLocalPort() { return endpoint.getLocalPort(); }
/*
* When Tomcat expects data from the client, this is the time Tomcat will
* wait for that data to arrive before closing the connection.
*/
public int getConnectionTimeout() {
return endpoint.getConnectionTimeout();
}
public void setConnectionTimeout(int timeout) {
endpoint.setConnectionTimeout(timeout);
}
public long getConnectionCount() {
return endpoint.getConnectionCount();
}
public void setAcceptorThreadPriority(int threadPriority) {
endpoint.setAcceptorThreadPriority(threadPriority);
}
public int getAcceptorThreadPriority() {
return endpoint.getAcceptorThreadPriority();
}
// ---------------------------------------------------------- Public methods
public synchronized int getNameIndex() {
if (nameIndex == 0) {
nameIndex = nameCounter.incrementAndGet();
}
return nameIndex;
}
/**
* The name will be prefix-address-port if address is non-null and
* prefix-port if the address is null.
*
* @return A name for this protocol instance that is appropriately quoted
* for use in an ObjectName.
*/
public String getName() {
return ObjectName.quote(getNameInternal());
}
private String getNameInternal() {
StringBuilder name = new StringBuilder(getNamePrefix());
name.append('-');
String id = getId();
if (id != null) {
name.append(id);
} else {
if (getAddress() != null) {
name.append(getAddress().getHostAddress());
name.append('-');
}
int port = getPortWithOffset();
if (port == 0) {
// Auto binding is in use. Check if port is known
name.append("auto-");
name.append(getNameIndex());
port = getLocalPort();
if (port != -1) {
name.append('-');
name.append(port);
}
} else {
name.append(port);
}
}
return name.toString();
}
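// Illustrative examples (the "http-nio" prefix and the addresses are
// assumptions, not taken from this source): with no configured address and
// port 8080 the method above yields "http-nio-8080"; with an address it yields
// e.g. "http-nio-127.0.0.1-8080"; with automatic port binding (port 0) it
// yields "http-nio-auto-1" and, once the local port is known, e.g.
// "http-nio-auto-1-49152". getName() then quotes the result for use in an
// ObjectName, e.g. "\"http-nio-8080\"".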
public void addWaitingProcessor(Processor processor) {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractProtocol.waitingProcessor.add", processor));
}
waitingProcessors.add(processor);
}
public void removeWaitingProcessor(Processor processor) {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractProtocol.waitingProcessor.remove", processor));
}
waitingProcessors.remove(processor);
}
/*
* Primarily for debugging and testing. Could be exposed via JMX if
* considered useful.
*/
public int getWaitingProcessorCount() {
return waitingProcessors.size();
}
// ----------------------------------------------- Accessors for sub-classes
protected AbstractEndpoint<S,?> getEndpoint() {
return endpoint;
}
protected Handler<S> getHandler() {
return handler;
}
protected void setHandler(Handler<S> handler) {
this.handler = handler;
}
// -------------------------------------------------------- Abstract methods
/**
* Concrete implementations need to provide access to their logger to be
* used by the abstract classes.
* @return the logger
*/
protected abstract Log getLog();
/**
* Obtain the prefix to be used when constructing a name for this protocol
* handler. The name will be prefix-address-port.
* @return the prefix
*/
protected abstract String getNamePrefix();
/**
* Obtain the name of the protocol (Http, Ajp, etc.). Used with JMX.
* @return the protocol name
*/
protected abstract String getProtocolName();
/**
* Find a suitable handler for the protocol negotiated
* at the network layer.
* @param name The name of the requested negotiated protocol.
* @return The instance where {@link UpgradeProtocol#getAlpnName()} matches
* the requested protocol
*/
protected abstract UpgradeProtocol getNegotiatedProtocol(String name);
/**
* Find a suitable handler for the specified upgraded protocol name. This
* is used for direct connection protocol selection.
* @param name The name of the requested negotiated protocol.
* @return The instance where {@link UpgradeProtocol#getAlpnName()} matches
* the requested protocol
*/
protected abstract UpgradeProtocol getUpgradeProtocol(String name);
/**
* Create and configure a new Processor instance for the current protocol
* implementation.
*
* @return A fully configured Processor instance that is ready to use
*/
protected abstract Processor createProcessor();
protected abstract Processor createUpgradeProcessor(
SocketWrapperBase<?> socket,
UpgradeToken upgradeToken);
// ----------------------------------------------------- JMX related methods
protected String domain;
protected ObjectName oname;
protected MBeanServer mserver;
public ObjectName getObjectName() {
return oname;
}
public String getDomain() {
return domain;
}
@Override
public ObjectName preRegister(MBeanServer server, ObjectName name)
throws Exception {
oname = name;
mserver = server;
domain = name.getDomain();
return name;
}
@Override
public void postRegister(Boolean registrationDone) {
// NOOP
}
@Override
public void preDeregister() throws Exception {
// NOOP
}
@Override
public void postDeregister() {
// NOOP
}
private ObjectName createObjectName() throws MalformedObjectNameException {
// Use the same domain as the connector
domain = getAdapter().getDomain();
if (domain == null) {
return null;
}
StringBuilder name = new StringBuilder(getDomain());
name.append(":type=ProtocolHandler,port=");
int port = getPortWithOffset();
if (port > 0) {
name.append(port);
} else {
name.append("auto-");
name.append(getNameIndex());
}
InetAddress address = getAddress();
if (address != null) {
name.append(",address=");
name.append(ObjectName.quote(address.getHostAddress()));
}
return new ObjectName(name.toString());
}
// ------------------------------------------------------- Lifecycle methods
/*
* NOTE: There is no maintenance of state or checking for valid transitions
* within this class. It is expected that the connector will maintain state
* and prevent invalid state transitions.
*/
@Override
public void init() throws Exception {
if (getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.init", getName()));
logPortOffset();
}
if (oname == null) {
// Component not pre-registered so register it
oname = createObjectName();
if (oname != null) {
Registry.getRegistry(null, null).registerComponent(this, oname, null);
}
}
if (this.domain != null) {
ObjectName rgOname = new ObjectName(domain + ":type=GlobalRequestProcessor,name=" + getName());
this.rgOname = rgOname;
Registry.getRegistry(null, null).registerComponent(
getHandler().getGlobal(), rgOname, null);
}
String endpointName = getName();
endpoint.setName(endpointName.substring(1, endpointName.length()-1));
endpoint.setDomain(domain);
endpoint.init();
}
@Override
public void start() throws Exception {
if (getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.start", getName()));
logPortOffset();
}
endpoint.start();
monitorFuture = getUtilityExecutor().scheduleWithFixedDelay(
() -> {
if (!isPaused()) {
startAsyncTimeout();
}
}, 0, 60, TimeUnit.SECONDS);
}
/**
* Note: The name of this method originated with the Servlet 3.0
* asynchronous processing but evolved over time to represent a timeout that
* is triggered independently of the socket read/write timeouts.
*/
protected void startAsyncTimeout() {
if (timeoutFuture == null || timeoutFuture.isDone()) {
if (timeoutFuture != null && timeoutFuture.isDone()) {
// There was an error executing the scheduled task, get it and log it
try {
timeoutFuture.get();
} catch (InterruptedException | ExecutionException e) {
getLog().error(sm.getString("abstractProtocolHandler.asyncTimeoutError"), e);
}
}
timeoutFuture = getUtilityExecutor().scheduleAtFixedRate(
() -> {
long now = System.currentTimeMillis();
for (Processor processor : waitingProcessors) {
processor.timeoutAsync(now);
}
}, 1, 1, TimeUnit.SECONDS);
}
}
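// Illustrative summary (not part of the original source): start() above
// schedules a monitor task every 60 seconds that re-invokes this method while
// the endpoint is not paused, and this method in turn runs a one-second
// fixed-rate task that calls timeoutAsync(now) on every waiting processor.
// If the previous timeout task terminated with an exception, its failure is
// retrieved via timeoutFuture.get() and logged before a new task is scheduled.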
protected void stopAsyncTimeout() {
if (timeoutFuture != null) {
timeoutFuture.cancel(false);
timeoutFuture = null;
}
}
@Override
public void pause() throws Exception {
if (getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.pause", getName()));
}
stopAsyncTimeout();
endpoint.pause();
}
public boolean isPaused() {
return endpoint.isPaused();
}
@Override
public void resume() throws Exception {
if(getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.resume", getName()));
}
endpoint.resume();
startAsyncTimeout();
}
@Override
public void stop() throws Exception {
if(getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.stop", getName()));
logPortOffset();
}
if (monitorFuture != null) {
monitorFuture.cancel(true);
monitorFuture = null;
}
stopAsyncTimeout();
// Timeout any waiting processor
for (Processor processor : waitingProcessors) {
processor.timeoutAsync(-1);
}
endpoint.stop();
}
@Override
public void destroy() throws Exception {
if(getLog().isInfoEnabled()) {
getLog().info(sm.getString("abstractProtocolHandler.destroy", getName()));
logPortOffset();
}
try {
endpoint.destroy();
} finally {
if (oname != null) {
if (mserver == null) {
Registry.getRegistry(null, null).unregisterComponent(oname);
} else {
// Possibly registered with a different MBeanServer
try {
mserver.unregisterMBean(oname);
} catch (MBeanRegistrationException | InstanceNotFoundException e) {
getLog().info(sm.getString("abstractProtocol.mbeanDeregistrationFailed",
oname, mserver));
}
}
}
ObjectName rgOname = getGlobalRequestProcessorMBeanName();
if (rgOname != null) {
Registry.getRegistry(null, null).unregisterComponent(rgOname);
}
}
}
@Override
public void closeServerSocketGraceful() {
endpoint.closeServerSocketGraceful();
}
@Override
public long awaitConnectionsClose(long waitMillis) {
getLog().info(sm.getString("abstractProtocol.closeConnectionsAwait",
Long.valueOf(waitMillis), getName()));
return endpoint.awaitConnectionsClose(waitMillis);
}
private void logPortOffset() {
if (getPort() != getPortWithOffset()) {
getLog().info(sm.getString("abstractProtocolHandler.portOffset", getName(),
String.valueOf(getPort()), String.valueOf(getPortOffset())));
}
}
// ------------------------------------------- Connection handler base class
protected static class ConnectionHandler<S> implements AbstractEndpoint.Handler<S> {
private final AbstractProtocol<S> proto;
private final RequestGroupInfo global = new RequestGroupInfo();
private final AtomicLong registerCount = new AtomicLong(0);
private final RecycledProcessors recycledProcessors = new RecycledProcessors(this);
public ConnectionHandler(AbstractProtocol<S> proto) {
this.proto = proto;
}
protected AbstractProtocol<S> getProtocol() {
return proto;
}
protected Log getLog() {
return getProtocol().getLog();
}
@Override
public Object getGlobal() {
return global;
}
@Override
public void recycle() {
recycledProcessors.clear();
}
@Override
public SocketState process(SocketWrapperBase<S> wrapper, SocketEvent status) {
// Check the wrapper before dereferencing it in the debug log
if (wrapper == null) {
// Nothing to do. Socket has been closed.
return SocketState.CLOSED;
}
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.process",
wrapper.getSocket(), status));
}
S socket = wrapper.getSocket();
Processor processor = (Processor) wrapper.getCurrentProcessor();
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.connectionsGet",
processor, socket));
}
// Timeouts are calculated on a dedicated thread and then
// dispatched. Because of delays in the dispatch process, the
// timeout may no longer be required. Check here and avoid
// unnecessary processing.
if (SocketEvent.TIMEOUT == status &&
(processor == null ||
!processor.isAsync() && !processor.isUpgrade() ||
processor.isAsync() && !processor.checkAsyncTimeoutGeneration())) {
// This is effectively a NO-OP
return SocketState.OPEN;
}
if (processor != null) {
// Make sure an async timeout doesn't fire
getProtocol().removeWaitingProcessor(processor);
} else if (status == SocketEvent.DISCONNECT || status == SocketEvent.ERROR) {
// Nothing to do. Endpoint requested a close and there is no
// longer a processor associated with this socket.
return SocketState.CLOSED;
}
ContainerThreadMarker.set();
try {
if (processor == null) {
String negotiatedProtocol = wrapper.getNegotiatedProtocol();
// OpenSSL typically returns null whereas JSSE typically
// returns "" when no protocol is negotiated
if (negotiatedProtocol != null && negotiatedProtocol.length() > 0) {
UpgradeProtocol upgradeProtocol = getProtocol().getNegotiatedProtocol(negotiatedProtocol);
if (upgradeProtocol != null) {
processor = upgradeProtocol.getProcessor(wrapper, getProtocol().getAdapter());
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.processorCreate", processor));
}
} else if (negotiatedProtocol.equals("http/1.1")) {
// Explicitly negotiated the default protocol.
// Obtain a processor below.
} else {
// TODO:
// OpenSSL 1.0.2's ALPN callback doesn't support
// failing the handshake with an error if no
// protocol can be negotiated. Therefore, we need to
// fail the connection here. Once this is fixed,
// replace the code below with the commented out
// block.
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.negotiatedProcessor.fail",
negotiatedProtocol));
}
return SocketState.CLOSED;
/*
* To replace the code above once OpenSSL 1.1.0 is
* used.
// Failed to create processor. This is a bug.
throw new IllegalStateException(sm.getString(
"abstractConnectionHandler.negotiatedProcessor.fail",
negotiatedProtocol));
*/
}
}
}
if (processor == null) {
processor = recycledProcessors.pop();
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.processorPop", processor));
}
}
if (processor == null) {
processor = getProtocol().createProcessor();
register(processor);
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.processorCreate", processor));
}
}
processor.setSslSupport(
wrapper.getSslSupport(getProtocol().getClientCertProvider()));
// Associate the processor with the connection
wrapper.setCurrentProcessor(processor);
SocketState state = SocketState.CLOSED;
do {
state = processor.process(wrapper, status);
if (state == SocketState.UPGRADING) {
// Get the HTTP upgrade handler
UpgradeToken upgradeToken = processor.getUpgradeToken();
// Restore leftover input to the wrapper so the upgrade
// processor can process it.
ByteBuffer leftOverInput = processor.getLeftoverInput();
wrapper.unRead(leftOverInput);
if (upgradeToken == null) {
// Assume direct HTTP/2 connection
UpgradeProtocol upgradeProtocol = getProtocol().getUpgradeProtocol("h2c");
if (upgradeProtocol != null) {
// Release the Http11 processor to be re-used
release(processor);
// Create the upgrade processor
processor = upgradeProtocol.getProcessor(wrapper, getProtocol().getAdapter());
// Associate the processor with the connection
wrapper.setCurrentProcessor(processor);
} else {
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString(
"abstractConnectionHandler.negotiatedProcessor.fail",
"h2c"));
}
// Exit loop and trigger appropriate clean-up
state = SocketState.CLOSED;
}
} else {
HttpUpgradeHandler httpUpgradeHandler = upgradeToken.getHttpUpgradeHandler();
// Release the Http11 processor to be re-used
release(processor);
// Create the upgrade processor
processor = getProtocol().createUpgradeProcessor(wrapper, upgradeToken);
if (getLog().isDebugEnabled()) {
getLog().debug(sm.getString("abstractConnectionHandler.upgradeCreate",
processor, wrapper));
}
// Associate the processor with the connection
wrapper.setCurrentProcessor(processor);
// Initialise the upgrade handler (which may trigger
// some IO using the new protocol which is why the lines
// above are necessary)
// This cast should be safe. If it fails the error
// handling for the surrounding try/catch will deal with
// it.
if (upgradeToken.getInstanceManager() == null) {
httpUpgradeHandler.init((WebConnection) processor);
} else {
ClassLoader oldCL = upgradeToken.getContextBind().bind(false, null);
try {
httpUpgradeHandler.init((WebConnection) processor);
} finally {
upgradeToken.getContextBind().unbind(false, oldCL);
}
}
if (httpUpgradeHandler instanceof InternalHttpUpgradeHandler) {
if (((InternalHttpUpgradeHandler) httpUpgradeHandler).hasAsyncIO()) {
// The handler will initiate all further I/O
state = SocketState.UPGRADED;
}
}
}
}
} while ( state == SocketState.UPGRADING);
if (state == SocketState.LONG) {
// In the middle of processing a request/response. Keep the
// socket associated with the processor. Exact requirements
// depend on type of long poll
longPoll(wrapper, processor);
if (processor.isAsync()) {
getProtocol().addWaitingProcessor(processor);
}
} else if (state == SocketState.OPEN) {
// In keep-alive but between requests. OK to recycle
// processor. Continue to poll for the next request.
wrapper.setCurrentProcessor(null);
release(processor);
wrapper.registerReadInterest();
} else if (state == SocketState.SENDFILE) {
// Sendfile in progress. If it fails, the socket will be
// closed. If it works, the socket will either be added to the
// poller (or equivalent) to await more data or processed
// if there are any pipe-lined requests remaining.
} else if (state == SocketState.UPGRADED) {
// Don't add sockets back to the poller if this was a
// non-blocking write otherwise the poller may trigger
// multiple read events which may lead to thread starvation
// in the connector. The write() method will add this socket
// to the poller if necessary.
if (status != SocketEvent.OPEN_WRITE) {
longPoll(wrapper, processor);
getProtocol().addWaitingProcessor(processor);
}
} else if (state == SocketState.SUSPENDED) {
// Don't add sockets back to the poller.
// The resumeProcessing() method will add this socket
// to the poller.
} else {
// Connection closed. OK to recycle the processor.
// Processors handling upgrades require additional clean-up
// before release.
wrapper.setCurrentProcessor(null);
if (processor.isUpgrade()) {
UpgradeToken upgradeToken = processor.getUpgradeToken();
HttpUpgradeHandler httpUpgradeHandler = upgradeToken.getHttpUpgradeHandler();
InstanceManager instanceManager = upgradeToken.getInstanceManager();
if (instanceManager == null) {
httpUpgradeHandler.destroy();
} else {
ClassLoader oldCL = upgradeToken.getContextBind().bind(false, null);
try {
httpUpgradeHandler.destroy();
} finally {
try {
instanceManager.destroyInstance(httpUpgradeHandler);
} catch (Throwable e) {
ExceptionUtils.handleThrowable(e);
getLog().error(sm.getString("abstractConnectionHandler.error"), e);
}
upgradeToken.getContextBind().unbind(false, oldCL);
}
}
}
release(processor);
}
return state;
} catch(java.net.SocketException e) {
// SocketExceptions are normal
getLog().debug(sm.getString(
"abstractConnectionHandler.socketexception.debug"), e);
} catch (java.io.IOException e) {
// IOExceptions are normal
getLog().debug(sm.getString(
"abstractConnectionHandler.ioexception.debug"), e);
} catch (ProtocolException e) {
// Protocol exceptions normally mean the client sent invalid or
// incomplete data.
getLog().debug(sm.getString(
"abstractConnectionHandler.protocolexception.debug"), e);
}
// Future developers: if you discover any other
// rare-but-nonfatal exceptions, catch them here, and log as
// above.
catch (OutOfMemoryError oome) {
// Try and handle this here to give Tomcat a chance to close the
// connection and prevent clients waiting until they time out.
// Worst case, it isn't recoverable and the attempt at logging
// will trigger another OOME.
getLog().error(sm.getString("abstractConnectionHandler.oome"), oome);
} catch (Throwable e) {
ExceptionUtils.handleThrowable(e);
// any other exception or error is odd. Here we log it
// with "ERROR" level, so it will show up even on
// less-than-verbose logs.
getLog().error(sm.getString("abstractConnectionHandler.error"), e);
} finally {
ContainerThreadMarker.clear();
}
// Make sure socket/processor is removed from the list of current
// connections
wrapper.setCurrentProcessor(null);
release(processor);
return SocketState.CLOSED;
}
protected void longPoll(SocketWrapperBase<?> socket, Processor processor) {
if (!processor.isAsync()) {
// This is currently only used with HTTP
// Either:
// - this is an upgraded connection
// - the request line/headers have not been completely
// read
socket.registerReadInterest();
}
}
/**
* Expected to be used by the handler once the processor is no longer
* required.
*
* @param processor Processor being released (that was associated with
* the socket)
*/
private void release(Processor processor) {
if (processor != null) {
processor.recycle();
if (processor.isUpgrade()) {
// While UpgradeProcessor instances should not normally be
// present in waitingProcessors there are various scenarios
// where this can happen. E.g.:
// - when AsyncIO is used
// - WebSocket I/O error on non-container thread
// Err on the side of caution and always try and remove any
// UpgradeProcessor instances from waitingProcessors
getProtocol().removeWaitingProcessor(processor);
} else {
// After recycling, only instances of UpgradeProcessorBase
// will return true for isUpgrade().
// Instances of UpgradeProcessorBase should not be added to
// recycledProcessors since that pool is only for AJP or
// HTTP processors
recycledProcessors.push(processor);
if (getLog().isDebugEnabled()) {
getLog().debug("Pushed Processor [" + processor + "]");
}
}
}
}
/**
* Expected to be used by the Endpoint to release resources on socket
* close, errors etc.
*/
@Override
public void release(SocketWrapperBase<S> socketWrapper) {
Processor processor = (Processor) socketWrapper.getCurrentProcessor();
socketWrapper.setCurrentProcessor(null);
release(processor);
}
protected void register(Processor processor) {
if (getProtocol().getDomain() != null) {
synchronized (this) {
try {
long count = registerCount.incrementAndGet();
RequestInfo rp =
processor.getRequest().getRequestProcessor();
rp.setGlobalProcessor(global);
ObjectName rpName = new ObjectName(
getProtocol().getDomain() +
":type=RequestProcessor,worker="
+ getProtocol().getName() +
",name=" + getProtocol().getProtocolName() +
"Request" + count);
if (getLog().isDebugEnabled()) {
getLog().debug("Register [" + processor + "] as [" + rpName + "]");
}
Registry.getRegistry(null, null).registerComponent(rp,
rpName, null);
rp.setRpName(rpName);
} catch (Exception e) {
getLog().warn(sm.getString("abstractProtocol.processorRegisterError"), e);
}
}
}
}
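// Illustrative example ("Catalina" and "Http" are assumptions, not taken from
// this source): for a protocol registered under the "Catalina" domain the code
// above produces names of the form
//
//   Catalina:type=RequestProcessor,worker="http-nio-8080",name=HttpRequest1
//
// where the worker is the quoted connector name and the trailing counter comes
// from registerCount.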
protected void unregister(Processor processor) {
if (getProtocol().getDomain() != null) {
synchronized (this) {
try {
Request r = processor.getRequest();
if (r == null) {
// Probably an UpgradeProcessor
return;
}
RequestInfo rp = r.getRequestProcessor();
rp.setGlobalProcessor(null);
ObjectName rpName = rp.getRpName();
if (getLog().isDebugEnabled()) {
getLog().debug("Unregister [" + rpName + "]");
}
Registry.getRegistry(null, null).unregisterComponent(
rpName);
rp.setRpName(null);
} catch (Exception e) {
getLog().warn(sm.getString("abstractProtocol.processorUnregisterError"), e);
}
}
}
}
@Override
public final void pause() {
/*
* Inform all the processors associated with current connections
* that the endpoint is being paused. Most won't care. Those
* processing multiplexed streams may wish to take action. For
* example, HTTP/2 may wish to stop accepting new streams.
*
* Note that even if the endpoint is resumed, there is (currently)
* no API to inform the Processors of this.
*/
for (SocketWrapperBase<S> wrapper : proto.getEndpoint().getConnections()) {
Processor processor = (Processor) wrapper.getCurrentProcessor();
if (processor != null) {
processor.pause();
}
}
}
}
protected static class RecycledProcessors extends SynchronizedStack<Processor> {
private final transient ConnectionHandler<?> handler;
protected final AtomicInteger size = new AtomicInteger(0);
public RecycledProcessors(ConnectionHandler<?> handler) {
this.handler = handler;
}
@SuppressWarnings("sync-override") // Size may exceed cache size a bit
@Override
public boolean push(Processor processor) {
int cacheSize = handler.getProtocol().getProcessorCache();
boolean offer = cacheSize == -1 ? true : size.get() < cacheSize;
// Avoid over-growing the cache, or adding to it after we have stopped
boolean result = false;
if (offer) {
result = super.push(processor);
if (result) {
size.incrementAndGet();
}
}
if (!result) {
handler.unregister(processor);
}
return result;
}
@SuppressWarnings("sync-override") // OK if size is too big briefly
@Override
public Processor pop() {
Processor result = super.pop();
if (result != null) {
size.decrementAndGet();
}
return result;
}
@Override
public synchronized void clear() {
Processor next = pop();
while (next != null) {
handler.unregister(next);
next = pop();
}
super.clear();
size.set(0);
}
}
}
|
Avoid using deprecated method
|
java/org/apache/coyote/AbstractProtocol.java
|
Avoid using deprecated method
|
|
Java
|
apache-2.0
|
9c7dc845560c97015c7229f500d3cae1ba22a7b5
| 0
|
martin7890/jitsi,cobratbq/jitsi,jibaro/jitsi,jibaro/jitsi,ibauersachs/jitsi,ringdna/jitsi,jitsi/jitsi,iant-gmbh/jitsi,tuijldert/jitsi,bhatvv/jitsi,pplatek/jitsi,mckayclarey/jitsi,ibauersachs/jitsi,dkcreinoso/jitsi,HelioGuilherme66/jitsi,iant-gmbh/jitsi,dkcreinoso/jitsi,marclaporte/jitsi,dkcreinoso/jitsi,pplatek/jitsi,HelioGuilherme66/jitsi,jitsi/jitsi,iant-gmbh/jitsi,ringdna/jitsi,iant-gmbh/jitsi,HelioGuilherme66/jitsi,gpolitis/jitsi,bhatvv/jitsi,jitsi/jitsi,martin7890/jitsi,ibauersachs/jitsi,damencho/jitsi,HelioGuilherme66/jitsi,iant-gmbh/jitsi,ringdna/jitsi,bhatvv/jitsi,459below/jitsi,damencho/jitsi,marclaporte/jitsi,tuijldert/jitsi,ibauersachs/jitsi,level7systems/jitsi,tuijldert/jitsi,cobratbq/jitsi,bebo/jitsi,pplatek/jitsi,procandi/jitsi,procandi/jitsi,procandi/jitsi,level7systems/jitsi,marclaporte/jitsi,jitsi/jitsi,dkcreinoso/jitsi,jitsi/jitsi,tuijldert/jitsi,level7systems/jitsi,marclaporte/jitsi,martin7890/jitsi,ringdna/jitsi,Metaswitch/jitsi,mckayclarey/jitsi,mckayclarey/jitsi,level7systems/jitsi,laborautonomo/jitsi,damencho/jitsi,dkcreinoso/jitsi,Metaswitch/jitsi,laborautonomo/jitsi,459below/jitsi,gpolitis/jitsi,laborautonomo/jitsi,ringdna/jitsi,cobratbq/jitsi,bebo/jitsi,pplatek/jitsi,marclaporte/jitsi,bhatvv/jitsi,HelioGuilherme66/jitsi,bebo/jitsi,Metaswitch/jitsi,459below/jitsi,procandi/jitsi,gpolitis/jitsi,jibaro/jitsi,bebo/jitsi,ibauersachs/jitsi,bebo/jitsi,cobratbq/jitsi,jibaro/jitsi,459below/jitsi,martin7890/jitsi,gpolitis/jitsi,pplatek/jitsi,cobratbq/jitsi,459below/jitsi,tuijldert/jitsi,mckayclarey/jitsi,laborautonomo/jitsi,damencho/jitsi,gpolitis/jitsi,laborautonomo/jitsi,mckayclarey/jitsi,damencho/jitsi,procandi/jitsi,martin7890/jitsi,Metaswitch/jitsi,jibaro/jitsi,level7systems/jitsi,bhatvv/jitsi
|
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.call.conference;
import java.awt.*;
import java.util.*;
import javax.swing.*;
import net.java.sip.communicator.impl.gui.main.call.*;
import net.java.sip.communicator.impl.gui.main.call.CallPeerAdapter;
import net.java.sip.communicator.impl.gui.utils.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.skin.*;
import net.java.sip.communicator.util.swing.*;
/**
* A panel depicting a call peer which is a conference focus. It renders the
* focus peer itself and one <tt>ConferenceMemberPanel</tt> per conference
* member reported by that peer.
*
* @author Yana Stamcheva
*/
public class ConferenceFocusPanel
extends TransparentPanel
implements ConferenceCallPeerRenderer,
Skinnable
{
/**
* The peer corresponding to the focus.
*/
private final CallPeer focusPeer;
/**
* The renderer corresponding to the parent call.
*/
private final CallRenderer callRenderer;
/**
* The call panel.
*/
private final CallPanel callPanel;
/**
* A mapping of a member and its renderer.
*/
private final Map<ConferenceMember, ConferenceMemberPanel>
conferenceMembersPanels
= new Hashtable<ConferenceMember, ConferenceMemberPanel>();
private ConferencePeerPanel focusPeerPanel;
/**
* Creates an instance of <tt>ConferenceFocusPanel</tt> by specifying the
* parent call renderer, the call panel and the peer represented by this
* conference focus panel.
*
* @param callRenderer the parent call renderer
* @param callPanel the call panel
* @param callPeer the peer represented by this focus panel
*/
public ConferenceFocusPanel(CallRenderer callRenderer,
CallPanel callPanel,
CallPeer callPeer)
{
this.focusPeer = callPeer;
this.callRenderer = callRenderer;
this.callPanel = callPanel;
this.setLayout(new GridBagLayout());
// First add the focus peer.
addFocusPeerPanel();
for (ConferenceMember member : callPeer.getConferenceMembers())
{
addConferenceMemberPanel(member);
}
}
/**
* Adds the focus peer panel.
*/
public void addFocusPeerPanel()
{
focusPeerPanel
= new ConferencePeerPanel(callRenderer, callPanel, focusPeer);
GridBagConstraints constraints = new GridBagConstraints();
// Add the member panel to this container
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = 0;
constraints.weightx = 1;
constraints.weighty = 0;
constraints.insets = new Insets(0, 0, 3, 0);
this.add(focusPeerPanel, constraints);
}
/**
* Adds a <tt>ConferenceMemberPanel</tt> for a given
* <tt>ConferenceMember</tt>.
*
* @param member the <tt>ConferenceMember</tt> that will correspond to the
* panel to add.
*/
public void addConferenceMemberPanel(ConferenceMember member)
{
String localUserAddress
= focusPeer.getProtocolProvider().getAccountID().getAccountAddress();
boolean isLocalMember
= addressesAreEqual(member.getAddress(), localUserAddress);
// We don't want to add the local member to the list of members.
if (isLocalMember)
return;
if (addressesAreEqual(member.getAddress(), focusPeer.getAddress()))
return;
// It's already there.
if (conferenceMembersPanels.containsKey(member))
return;
ConferenceMemberPanel memberPanel
= new ConferenceMemberPanel(callRenderer, member);
member.addPropertyChangeListener(memberPanel);
// Map the conference member to the created member panel.
conferenceMembersPanels.put(member, memberPanel);
GridBagConstraints constraints = new GridBagConstraints();
// Add the member panel to this container
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = getComponentCount();
constraints.weightx = 1;
constraints.weighty = 0;
constraints.insets = new Insets(0, 0, 3, 0);
this.add(memberPanel, constraints);
initSecuritySettings();
}
/**
* Removes the <tt>ConferenceMemberPanel</tt> corresponding to the given
* <tt>member</tt>.
*
* @param member the <tt>ConferenceMember</tt> whose panel is to be removed
*/
public void removeConferenceMemberPanel(ConferenceMember member)
{
Component memberPanel = conferenceMembersPanels.get(member);
if (memberPanel != null)
{
int i = 0;
this.remove(memberPanel);
conferenceMembersPanels.remove(member);
if (!addressesAreEqual(member.getAddress(), focusPeer.getAddress()))
member.removePropertyChangeListener(
(ConferenceMemberPanel) memberPanel);
for(Map.Entry<ConferenceMember, ConferenceMemberPanel> m :
conferenceMembersPanels.entrySet())
{
GridBagConstraints constraints = new GridBagConstraints();
Component mV = m.getValue();
this.remove(mV);
// Add again the member panel to this container
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = i;
constraints.weightx = 1;
constraints.weighty = 0;
constraints.insets = new Insets(0, 0, 3, 0);
this.add(mV, constraints);
i++;
}
}
}
/**
* Overrides {@link JComponent#paintComponent(Graphics)} in order to
* customize the background of this panel.
*
* @param g the <tt>Graphics</tt> object used for painting
*/
@Override
public void paintComponent(Graphics g)
{
super.paintComponent(g);
g = g.create();
try
{
AntialiasingManager.activateAntialiasing(g);
g.setColor(Color.LIGHT_GRAY);
g.fillRect(0, 0, this.getWidth(), this.getHeight());
g.setColor(Color.DARK_GRAY);
g.drawLine(0, 0, getWidth(), 0);
g.drawLine(0, getHeight() - 1, getWidth(), getHeight() - 1);
}
finally
{
g.dispose();
}
}
/**
* Reloads default avatar icon.
*/
public void loadSkin() {}
/**
* Sets the name of the peer.
*
* @param name the name of the peer
*/
public void setPeerName(String name)
{
focusPeerPanel.setPeerName(name);
}
/**
* Sets the <tt>image</tt> of the peer.
*
* @param image the image to set
*/
public void setPeerImage(byte[] image)
{
focusPeerPanel.setPeerImage(image);
}
/**
* Sets the state of the contained call peer by specifying the
* state name.
*
* @param state the state of the contained call peer
*/
public void setPeerState(String state)
{
focusPeerPanel.setPeerState(state);
}
/**
* Sets the reason of a call failure if one occurs. The renderer should
* display this reason to the user.
*
* @param reason the reason of the error to set
*/
public void setErrorReason(String reason)
{
focusPeerPanel.setErrorReason(reason);
}
/**
* Sets the mute property value.
*
* @param isMute indicates if the call with this peer is
* muted
*/
public void setMute(boolean isMute)
{
focusPeerPanel.setMute(isMute);
}
/**
* Sets the "on hold" property value.
*
* @param isOnHold indicates if the call with this peer is put on hold
*/
public void setOnHold(boolean isOnHold)
{
focusPeerPanel.setOnHold(isOnHold);
}
/**
* Indicates that the security is turned on.
*
* @param evt Details about the event that caused this message.
*/
public void securityOn(CallPeerSecurityOnEvent evt)
{
focusPeerPanel.securityOn(evt);
for (ConferenceMemberPanel member : conferenceMembersPanels.values())
{
member.securityOn(evt);
}
}
/**
* Indicates that the security is turned off.
*
* @param evt Details about the event that caused this message.
*/
public void securityOff(CallPeerSecurityOffEvent evt)
{
focusPeerPanel.securityOff(evt);
for (ConferenceMemberPanel member : conferenceMembersPanels.values())
{
member.securityOff(evt);
}
}
/**
* Sets the call peer adapter that manages all related listeners.
*
* @param adapter the call peer adapter
*/
public void setCallPeerAdapter(CallPeerAdapter adapter)
{
focusPeerPanel.setCallPeerAdapter(adapter);
}
/**
* Returns the call peer adapter that manages all related listeners.
*
* @return the call peer adapter
*/
public CallPeerAdapter getCallPeerAdapter()
{
return focusPeerPanel.getCallPeerAdapter();
}
/**
* Prints the given DTMF character through this <tt>CallPeerRenderer</tt>.
*
* @param dtmfChar the DTMF char to print
*/
public void printDTMFTone(char dtmfChar)
{
focusPeerPanel.printDTMFTone(dtmfChar);
}
/**
* Returns the parent <tt>CallPanel</tt> containing this renderer.
*
* @return the parent <tt>CallPanel</tt> containing this renderer
*/
public CallPanel getCallPanel()
{
return callPanel;
}
/**
* Returns the parent call renderer.
*
* @return the parent call renderer
*/
public CallRenderer getCallRenderer()
{
return callRenderer;
}
/**
* Shows/hides the local video component.
*
* @param isVisible <tt>true</tt> to show the local video, <tt>false</tt> -
* otherwise
*/
public void setLocalVideoVisible(boolean isVisible)
{
focusPeerPanel.setLocalVideoVisible(isVisible);
}
/**
* Indicates if the local video component is currently visible.
*
* @return <tt>true</tt> if the local video component is currently visible,
* <tt>false</tt> - otherwise
*/
public boolean isLocalVideoVisible()
{
return focusPeerPanel.isLocalVideoVisible();
}
/**
* Returns the component associated with this renderer.
*
* @return the component associated with this renderer
*/
public Component getComponent()
{
return this;
}
/**
* Indicates that the given conference member has been added to the given
* peer.
*
* @param callPeer the parent call peer
* @param conferenceMember the member that was added
*/
public void conferenceMemberAdded( CallPeer callPeer,
ConferenceMember conferenceMember)
{
addConferenceMemberPanel(conferenceMember);
callPanel.refreshContainer();
}
/**
* Indicates that the given conference member has been removed from the
* given peer.
*
* @param callPeer the parent call peer
* @param conferenceMember the member that was removed
*/
public void conferenceMemberRemoved(CallPeer callPeer,
ConferenceMember conferenceMember)
{
removeConferenceMemberPanel(conferenceMember);
callPanel.refreshContainer();
}
/**
* Determines whether two specific addresses refer to one and the same
* peer/resource/contact.
* <p>
* <b>Warning</b>: Use the functionality sparingly because it assumes that
* an unspecified service is equal to any service.
* </p>
*
* @param a one of the addresses to be compared
* @param b the other address to be compared to <tt>a</tt>
* @return <tt>true</tt> if <tt>a</tt> and <tt>b</tt> name one and the same
* peer/resource/contact; <tt>false</tt>, otherwise
*/
private static boolean addressesAreEqual(String a, String b)
{
if (a.equals(b))
return true;
//TODO: this doesn't belong here
if(a.startsWith("sip:"))
a = a.substring(4);
if(a.startsWith("sips:"))
a = a.substring(5);
if(b.startsWith("sip:"))
b = b.substring(4);
if(b.startsWith("sips:"))
b = b.substring(5);
int aServiceBegin = a.indexOf('@');
String aUserID;
String aService;
if (aServiceBegin > -1)
{
aUserID = a.substring(0, aServiceBegin);
int slashIndex = a.indexOf("/");
if (slashIndex > 0)
aService = a.substring(aServiceBegin + 1, slashIndex);
else
aService = a.substring(aServiceBegin + 1);
}
else
{
aUserID = a;
aService = null;
}
int bServiceBegin = b.indexOf('@');
String bUserID;
String bService;
if (bServiceBegin > -1)
{
bUserID = b.substring(0, bServiceBegin);
int slashIndex = b.indexOf("/");
if (slashIndex > 0)
bService = b.substring(bServiceBegin + 1, slashIndex);
else
bService = b.substring(bServiceBegin + 1);
}
else
{
bUserID = b;
bService = null;
}
boolean userIDsAreEqual;
if ((aUserID == null) || (aUserID.length() < 1))
userIDsAreEqual = ((bUserID == null) || (bUserID.length() < 1));
else
userIDsAreEqual = aUserID.equals(bUserID);
if (!userIDsAreEqual)
return false;
boolean servicesAreEqual;
/*
* It's probably a veeery long shot but it's assumed here that an
* unspecified service is equal to any service. Such a case is, for
* example, RegistrarLess SIP.
*/
if (((aService == null) || (aService.length() < 1))
|| ((bService == null) || (bService.length() < 1)))
servicesAreEqual = true;
else
servicesAreEqual = aService.equals(bService);
return servicesAreEqual;
}
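// Illustrative examples (not part of the original source), based on the rules
// implemented above and using hypothetical addresses:
//
//   addressesAreEqual("sip:alice@example.com", "alice@example.com/desk")
//       -> true  (the scheme prefix and the "/resource" suffix are stripped)
//   addressesAreEqual("alice", "alice@example.com")
//       -> true  (an unspecified service matches any service)
//   addressesAreEqual("alice@example.com", "bob@example.com")
//       -> false (the user IDs differ)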
/**
* Returns null to indicate that there's no conference member sound level
* listener registered with this focus panel.
*/
public ConferenceMembersSoundLevelListener
getConferenceMembersSoundLevelListener()
{
return null;
}
/**
* Returns null to indicate that there's no stream sound level listener
* registered with this focus panel.
*/
public SoundLevelListener getStreamSoundLevelListener()
{
return null;
}
/**
* Initializes security.
*/
private void initSecuritySettings()
{
CallPeerSecurityStatusEvent securityEvent
= focusPeer.getCurrentSecuritySettings();
if (securityEvent instanceof CallPeerSecurityOnEvent)
{
securityOn((CallPeerSecurityOnEvent) securityEvent);
NotificationManager.fireNotification(
NotificationManager.CALL_SECURITY_ON);
}
}
}
|
src/net/java/sip/communicator/impl/gui/main/call/conference/ConferenceFocusPanel.java
|
/*
* SIP Communicator, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*/
package net.java.sip.communicator.impl.gui.main.call.conference;
import java.awt.*;
import java.util.*;
import javax.swing.*;
import net.java.sip.communicator.impl.gui.main.call.*;
import net.java.sip.communicator.impl.gui.main.call.CallPeerAdapter;
import net.java.sip.communicator.impl.gui.utils.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import net.java.sip.communicator.util.skin.*;
import net.java.sip.communicator.util.swing.*;
/**
* A panel depicting a call peer which is a conference focus. It renders the
* focus peer itself and one <tt>ConferenceMemberPanel</tt> per conference
* member reported by that peer.
*
* @author Yana Stamcheva
*/
public class ConferenceFocusPanel
extends TransparentPanel
implements ConferenceCallPeerRenderer,
Skinnable
{
/**
* The peer corresponding to the focus.
*/
private final CallPeer focusPeer;
/**
* The renderer corresponding to the parent call.
*/
private final CallRenderer callRenderer;
/**
* The call panel.
*/
private final CallPanel callPanel;
/**
* A mapping of a member and its renderer.
*/
private final Map<ConferenceMember, ConferenceMemberPanel>
conferenceMembersPanels
= new Hashtable<ConferenceMember, ConferenceMemberPanel>();
private ConferencePeerPanel focusPeerPanel;
/**
* Creates an instance of <tt>ConferenceFocusPanel</tt> by specifying the
* parent call renderer, the call panel and the peer represented by this
* conference focus panel.
*
* @param callRenderer the parent call renderer
* @param callPanel the call panel
* @param callPeer the peer represented by this focus panel
*/
public ConferenceFocusPanel(CallRenderer callRenderer,
CallPanel callPanel,
CallPeer callPeer)
{
this.focusPeer = callPeer;
this.callRenderer = callRenderer;
this.callPanel = callPanel;
this.setLayout(new GridBagLayout());
// First add the focus peer.
addFocusPeerPanel();
for (ConferenceMember member : callPeer.getConferenceMembers())
{
addConferenceMemberPanel(member);
}
}
/**
* Adds the focus peer panel.
*/
public void addFocusPeerPanel()
{
focusPeerPanel
= new ConferencePeerPanel(callRenderer, callPanel, focusPeer);
GridBagConstraints constraints = new GridBagConstraints();
// Add the member panel to this container
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = 0;
constraints.weightx = 1;
constraints.weighty = 0;
constraints.insets = new Insets(0, 0, 3, 0);
this.add(focusPeerPanel, constraints);
}
/**
* Adds a <tt>ConferenceMemberPanel</tt> for a given
* <tt>ConferenceMember</tt>.
*
* @param member the <tt>ConferenceMember</tt> that will correspond to the
* panel to add.
*/
public void addConferenceMemberPanel(ConferenceMember member)
{
String localUserAddress
= focusPeer.getProtocolProvider().getAccountID().getAccountAddress();
boolean isLocalMember
= addressesAreEqual(member.getAddress(), localUserAddress);
// We don't want to add the local member to the list of members.
if (isLocalMember)
return;
if (addressesAreEqual(member.getAddress(), focusPeer.getAddress()))
return;
// It's already there.
if (conferenceMembersPanels.containsKey(member))
return;
ConferenceMemberPanel memberPanel
= new ConferenceMemberPanel(callRenderer, member);
member.addPropertyChangeListener(
(ConferenceMemberPanel) memberPanel);
// Map the conference member to the created member panel.
conferenceMembersPanels.put(member, memberPanel);
GridBagConstraints constraints = new GridBagConstraints();
// Add the member panel to this container
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = getComponentCount();
constraints.weightx = 1;
constraints.weighty = 0;
constraints.insets = new Insets(0, 0, 3, 0);
this.add(memberPanel, constraints);
initSecuritySettings();
}
/**
* Removes the <tt>ConferenceMemberPanel</tt> corresponding to the given
* <tt>member</tt>.
*
* @param member the <tt>ConferenceMember</tt>, which panel to remove
*/
public void removeConferenceMemberPanel(ConferenceMember member)
{
Component memberPanel = conferenceMembersPanels.get(member);
if (memberPanel != null)
{
int i = 0;
this.remove(memberPanel);
conferenceMembersPanels.remove(member);
if (!addressesAreEqual(member.getAddress(), focusPeer.getAddress()))
member.removePropertyChangeListener(
(ConferenceMemberPanel) memberPanel);
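// Re-add the remaining member panels so that their grid rows stay
// contiguous after the removal above.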
for(Map.Entry<ConferenceMember, ConferenceMemberPanel> m :
conferenceMembersPanels.entrySet())
{
GridBagConstraints constraints = new GridBagConstraints();
Component mV = m.getValue();
this.remove(mV);
// Add again the member panel to this container
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = i;
constraints.weightx = 1;
constraints.weighty = 0;
constraints.insets = new Insets(0, 0, 3, 0);
this.add(mV, constraints);
i++;
}
}
}
/**
* Overrides {@link JComponent#paintComponent(Graphics)} in order to
* customize the background of this panel.
*
* @param g the <tt>Graphics</tt> object used for painting
*/
@Override
public void paintComponent(Graphics g)
{
super.paintComponent(g);
g = g.create();
try
{
AntialiasingManager.activateAntialiasing(g);
g.setColor(Color.LIGHT_GRAY);
g.fillRect(0, 0, this.getWidth(), this.getHeight());
g.setColor(Color.DARK_GRAY);
g.drawLine(0, 0, getWidth(), 0);
g.drawLine(0, getHeight() - 1, getWidth(), getHeight() - 1);
}
finally
{
g.dispose();
}
}
/**
* Reloads default avatar icon.
*/
public void loadSkin() {}
/**
* Sets the name of the peer.
*
* @param name the name of the peer
*/
public void setPeerName(String name)
{
focusPeerPanel.setPeerName(name);
}
/**
* Sets the <tt>image</tt> of the peer.
*
* @param image the image to set
*/
public void setPeerImage(byte[] image)
{
focusPeerPanel.setPeerImage(image);
}
/**
* Sets the state of the contained call peer by specifying the
* state name.
*
* @param state the state of the contained call peer
*/
public void setPeerState(String state)
{
focusPeerPanel.setPeerState(state);
}
/**
* Sets the reason of a call failure if one occurs. The renderer should
* display this reason to the user.
*
* @param reason the reason of the error to set
*/
public void setErrorReason(String reason)
{
focusPeerPanel.setErrorReason(reason);
}
/**
* Sets the mute property value.
*
* @param isMute indicates if the call with this peer is
* muted
*/
public void setMute(boolean isMute)
{
focusPeerPanel.setMute(isMute);
}
/**
* Sets the "on hold" property value.
*
* @param isOnHold indicates if the call with this peer is put on hold
*/
public void setOnHold(boolean isOnHold)
{
focusPeerPanel.setOnHold(isOnHold);
}
/**
* Indicates that the security is turned on.
*
* @param evt Details about the event that caused this message.
*/
public void securityOn(CallPeerSecurityOnEvent evt)
{
focusPeerPanel.securityOn(evt);
for (ConferenceMemberPanel member : conferenceMembersPanels.values())
{
member.securityOn(evt);
}
}
/**
* Indicates that the security is turned off.
*
* @param evt Details about the event that caused this message.
*/
public void securityOff(CallPeerSecurityOffEvent evt)
{
focusPeerPanel.securityOff(evt);
for (ConferenceMemberPanel member : conferenceMembersPanels.values())
{
member.securityOff(evt);
}
}
/**
* Sets the call peer adapter that manages all related listeners.
*
* @param adapter the call peer adapter
*/
public void setCallPeerAdapter(CallPeerAdapter adapter)
{
focusPeerPanel.setCallPeerAdapter(adapter);
}
/**
* Returns the call peer adapter that manages all related listeners.
*
* @return the call peer adapter
*/
public CallPeerAdapter getCallPeerAdapter()
{
return focusPeerPanel.getCallPeerAdapter();
}
/**
* Prints the given DTMF character through this <tt>CallPeerRenderer</tt>.
*
* @param dtmfChar the DTMF char to print
*/
public void printDTMFTone(char dtmfChar)
{
focusPeerPanel.printDTMFTone(dtmfChar);
}
/**
* Returns the parent <tt>CallPanel</tt> containing this renderer.
*
* @return the parent <tt>CallPanel</tt> containing this renderer
*/
public CallPanel getCallPanel()
{
return callPanel;
}
/**
* Returns the parent call renderer.
*
* @return the parent call renderer
*/
public CallRenderer getCallRenderer()
{
return callRenderer;
}
/**
* Shows/hides the local video component.
*
* @param isVisible <tt>true</tt> to show the local video, <tt>false</tt> -
* otherwise
*/
public void setLocalVideoVisible(boolean isVisible)
{
focusPeerPanel.setLocalVideoVisible(isVisible);
}
/**
* Indicates if the local video component is currently visible.
*
* @return <tt>true</tt> if the local video component is currently visible,
* <tt>false</tt> - otherwise
*/
public boolean isLocalVideoVisible()
{
return focusPeerPanel.isLocalVideoVisible();
}
/**
* Returns the component associated with this renderer.
*
* @return the component associated with this renderer
*/
public Component getComponent()
{
return this;
}
/**
* Indicates that the given conference member has been added to the given
* peer.
*
* @param callPeer the parent call peer
* @param conferenceMember the member that was added
*/
public void conferenceMemberAdded( CallPeer callPeer,
ConferenceMember conferenceMember)
{
addConferenceMemberPanel(conferenceMember);
callPanel.refreshContainer();
}
/**
* Indicates that the given conference member has been removed from the
* given peer.
*
* @param callPeer the parent call peer
* @param conferenceMember the member that was removed
*/
public void conferenceMemberRemoved(CallPeer callPeer,
ConferenceMember conferenceMember)
{
removeConferenceMemberPanel(conferenceMember);
callPanel.refreshContainer();
}
/**
* Determines whether two specific addresses refer to one and the same
* peer/resource/contact.
* <p>
* <b>Warning</b>: Use the functionality sparingly because it assumes that
* an unspecified service is equal to any service.
* </p>
*
* @param a one of the addresses to be compared
* @param b the other address to be compared to <tt>a</tt>
* @return <tt>true</tt> if <tt>a</tt> and <tt>b</tt> name one and the same
* peer/resource/contact; <tt>false</tt>, otherwise
*/
private static boolean addressesAreEqual(String a, String b)
{
if (a.equals(b))
return true;
//TODO: this doesn't belong here
if(a.startsWith("sip:"))
a = a.substring(4);
if(a.startsWith("sips:"))
a = a.substring(5);
if(b.startsWith("sip:"))
b = b.substring(4);
if(b.startsWith("sips:"))
b = b.substring(5);
int aServiceBegin = a.indexOf('@');
String aUserID;
String aService;
if (aServiceBegin > -1)
{
aUserID = a.substring(0, aServiceBegin);
int slashIndex = a.indexOf("/");
if (slashIndex > 0)
aService = a.substring(aServiceBegin + 1, slashIndex);
else
aService = a.substring(aServiceBegin + 1);
}
else
{
aUserID = a;
aService = null;
}
int bServiceBegin = b.indexOf('@');
String bUserID;
String bService;
if (bServiceBegin > -1)
{
bUserID = b.substring(0, bServiceBegin);
int slashIndex = b.indexOf("/");
if (slashIndex > 0)
bService = b.substring(bServiceBegin + 1, slashIndex);
else
bService = b.substring(bServiceBegin + 1);
}
else
{
bUserID = b;
bService = null;
}
boolean userIDsAreEqual;
if ((aUserID == null) || (aUserID.length() < 1))
userIDsAreEqual = ((bUserID == null) || (bUserID.length() < 1));
else
userIDsAreEqual = aUserID.equals(bUserID);
if (!userIDsAreEqual)
return false;
boolean servicesAreEqual;
/*
* It is probably a very long shot, but it is assumed here that an
* unspecified service is equal to any service. Such a case is, for
* example, registrar-less SIP.
*/
if (((aService == null) || (aService.length() < 1))
|| ((bService == null) || (bService.length() < 1)))
servicesAreEqual = true;
else
servicesAreEqual = aService.equals(bService);
return servicesAreEqual;
}
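/*
 * A few illustrative calls and their expected results, assuming the
 * "unspecified service matches any service" rule described above:
 *
 *   addressesAreEqual("alice@example.com", "sip:alice@example.com") -> true
 *   addressesAreEqual("alice", "alice@example.com")                 -> true
 *   addressesAreEqual("alice@a.example", "alice@b.example")         -> false
 */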
/**
* Returns null to indicate that there's no conference member sound level
* listener registered with this focus panel.
*/
public ConferenceMembersSoundLevelListener
getConferenceMembersSoundLevelListener()
{
return null;
}
/**
* Returns null to indicate that there's no stream sound level listener
* registered with this focus panel.
*/
public SoundLevelListener getStreamSoundLevelListener()
{
return null;
}
/**
* Initializes security.
*/
private void initSecuritySettings()
{
CallPeerSecurityStatusEvent securityEvent
= focusPeer.getCurrentSecuritySettings();
if (securityEvent instanceof CallPeerSecurityOnEvent)
{
securityOn((CallPeerSecurityOnEvent) securityEvent);
NotificationManager.fireNotification(
NotificationManager.CALL_SECURITY_ON);
}
}
}
|
Remove a warning about a redundant cast
|
src/net/java/sip/communicator/impl/gui/main/call/conference/ConferenceFocusPanel.java
|
Remove a warning about a redundant cast
|
|
Java
|
apache-2.0
|
5805e896d6ec46119df6da73e3c3bb57065bbac5
| 0
|
freme-project/Broker,freme-project/Broker,freme-project/Broker
|
package eu.freme.broker.integration_tests;
import com.hp.hpl.jena.shared.AssertionFailureException;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.request.HttpRequest;
import com.mashape.unirest.request.HttpRequestWithBody;
import eu.freme.conversion.rdf.JenaRDFConversionService;
import eu.freme.conversion.rdf.RDFConstants;
import eu.freme.conversion.rdf.RDFConversionService;
import org.junit.Before;
import org.nlp2rdf.cli.Validate;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Created by Arne on 29.07.2015.
*/
public abstract class IntegrationTest {
private String url = null;
private String service;
public RDFConversionService converter;
public IntegrationTest(String service){
this.service = service;
}
@Before
public void setup(){
url = IntegrationTestSetup.getURLEndpoint() + service;
converter = (RDFConversionService)IntegrationTestSetup.getContext().getBean(RDFConversionService.class);
}
protected HttpRequestWithBody baseRequestPost(String function) {
return Unirest.post(url + function);
}
public String getUrl() {
return url;
}
protected HttpRequest baseRequestGet( String function) {
return Unirest.get(url + function);
}
protected HttpRequestWithBody baseRequestDelete( String function) {
return Unirest.delete(url + function);
}
protected HttpRequestWithBody baseRequestPut( String function) {
return Unirest.put(url + function);
}
public static String readFile(String file) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
StringBuilder bldr = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
bldr.append(line);
bldr.append("\n");
}
reader.close();
return bldr.toString();
}
public void validateNIFResponse(HttpResponse<String> response, RDFConstants.RDFSerialization nifformat) throws IOException {
assertTrue(response.getStatus() == 200);
assertTrue(response.getBody().length() > 0);
// validate RDF
try {
assertNotNull(converter.unserializeRDF(response.getBody(), nifformat));
} catch (Exception e) {
throw new AssertionFailureException("RDF validation failed");
}
// validate NIF
if (nifformat == RDFConstants.RDFSerialization.TURTLE) {
Validate.main(new String[]{"-i", response.getBody(), "--informat","turtle"});
} else if (nifformat == RDFConstants.RDFSerialization.RDF_XML) {
Validate.main(new String[]{"-i", response.getBody(), "--informat","rdfxml"});
} else {
//Not implemented yet: n3, n-triples, json-ld
// Validate.main(new String[]{"-i", response.getBody()});
}
}
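// A minimal, hypothetical usage sketch from a concrete test subclass; the
// endpoint query string and request body below are examples only, not taken
// from this repository:
//
//   HttpResponse<String> response =
//           baseRequestPost("?informat=text&outformat=turtle")
//                   .body("some plain text")
//                   .asString();
//   validateNIFResponse(response, RDFConstants.RDFSerialization.TURTLE);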
}
|
src/test/java/eu/freme/broker/integration_tests/IntegrationTest.java
|
package eu.freme.broker.integration_tests;
import com.hp.hpl.jena.shared.AssertionFailureException;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.request.HttpRequest;
import com.mashape.unirest.request.HttpRequestWithBody;
import eu.freme.conversion.rdf.JenaRDFConversionService;
import eu.freme.conversion.rdf.RDFConstants;
import org.junit.Before;
import org.nlp2rdf.cli.Validate;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
/**
* Created by Arne on 29.07.2015.
*/
public abstract class IntegrationTest {
private String url = null;
private String service;
public RDFConversionService converter;
public IntegrationTest(String service){
this.service = service;
}
@Before
public void setup(){
url = IntegrationTestSetup.getURLEndpoint() + service;
converter = (RDFConversionService)IntegrationTestSetup.getContext().getBean(RDFConversionService.class);
}
protected HttpRequestWithBody baseRequestPost(String function) {
return Unirest.post(url + function);
}
public String getUrl() {
return url;
}
protected HttpRequest baseRequestGet( String function) {
return Unirest.get(url + function);
}
protected HttpRequestWithBody baseRequestDelete( String function) {
return Unirest.delete(url + function);
}
protected HttpRequestWithBody baseRequestPut( String function) {
return Unirest.put(url + function);
}
public static String readFile(String file) throws IOException {
BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
StringBuilder bldr = new StringBuilder();
String line;
while ((line = reader.readLine()) != null) {
bldr.append(line);
bldr.append("\n");
}
reader.close();
return bldr.toString();
}
public void validateNIFResponse(HttpResponse<String> response, RDFConstants.RDFSerialization nifformat) throws IOException {
assertTrue(response.getStatus() == 200);
assertTrue(response.getBody().length() > 0);
// validate RDF
try {
assertNotNull(converter.unserializeRDF(response.getBody(), nifformat));
} catch (Exception e) {
throw new AssertionFailureException("RDF validation failed");
}
// validate NIF
if (nifformat == RDFConstants.RDFSerialization.TURTLE) {
Validate.main(new String[]{"-i", response.getBody(), "--informat","turtle"});
} else if (nifformat == RDFConstants.RDFSerialization.RDF_XML) {
Validate.main(new String[]{"-i", response.getBody(), "--informat","rdfxml"});
} else {
//Not implemented yet: n3, n-triples, json-ld
// Validate.main(new String[]{"-i", response.getBody()});
}
}
}
|
Cleaned up Code; added validateNIFResponse function
|
src/test/java/eu/freme/broker/integration_tests/IntegrationTest.java
|
Cleaned up Code; added validateNIFResponse function
|
|
Java
|
apache-2.0
|
a0ac3d7fb1c611faf9b2854d64abbdee6fb78733
| 0
|
bulldog2011/luxun
|
package com.leansoft.luxun.quickstart;
import static org.junit.Assert.*;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.leansoft.luxun.consumer.SimpleConsumer;
import com.leansoft.luxun.message.Message;
import com.leansoft.luxun.message.MessageList;
import com.leansoft.luxun.message.generated.CompressionCodec;
import com.leansoft.luxun.producer.Producer;
import com.leansoft.luxun.producer.ProducerConfig;
import com.leansoft.luxun.producer.ProducerData;
import com.leansoft.luxun.serializer.StringEncoder;
import com.leansoft.luxun.server.LuxunServer;
import com.leansoft.luxun.server.ServerConfig;
import com.leansoft.luxun.utils.TestUtils;
import com.leansoft.luxun.utils.Utils;
public class SimpleDemo {
private int brokerId1 = 0;
private int brokerId2 = 1;
private int port1 = 9092;
private int port2 = 9093;
private LuxunServer server1 = null;
private LuxunServer server2 = null;
private String brokerList = brokerId1 + ":localhost:" + port1 + "," + brokerId2 + ":localhost:" + port2;
private String broker1 = brokerId1 + ":localhost:" + port1;
private SimpleConsumer simpleConsumer1 = null;
private SimpleConsumer simpleConsumer2 = null;
@Before
public void setup() {
// set up 2 brokers
Properties props1 = new Properties();
props1.put("brokerid", String.valueOf(brokerId1));
props1.put("port", String.valueOf(port1));
props1.put("log.dir", TestUtils.createTempDir().getAbsolutePath());
ServerConfig config1 = new ServerConfig(props1);
server1 = new LuxunServer(config1);
server1.startup();
Properties props2 = new Properties();
props2.put("brokerid", String.valueOf(brokerId2));
props2.put("port", String.valueOf(port2));
props2.put("log.dir", TestUtils.createTempDir().getAbsolutePath());
ServerConfig config2 = new ServerConfig(props2);
server2 = new LuxunServer(config2);
server2.startup();
// set up two simple consumers
// create a consumer 1 to connect to the Luxun server running on localhost, port 9092, socket timeout of 60 secs
simpleConsumer1 = new SimpleConsumer("localhost", port1, 60000);
// create a consumer 2 to connect to the Luxun server running on localhost, port 9093, socket timeout of 60 secs
simpleConsumer2 = new SimpleConsumer("localhost", port2, 60000);
}
@Test
public void sendSingleMessage() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message");
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
}
@Test
public void sendMessageWithGZIPCompression() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
props.put("compression.codec", "1");
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message");
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertEquals(CompressionCodec.GZIP, messageList.getCompressionCodec());
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertEquals(CompressionCodec.GZIP, messageList.getCompressionCodec());
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
}
@Test
public void sendMessageWithAsyncProducer() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("producer.type", "async");
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message");
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
}
@Test
public void sendMultipleMessages() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
List<String> messages = new ArrayList<String>();
messages.add("test-message1");
messages.add("test-message2");
messages.add("test-message3");
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", messages);
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 3);
for(int i = 1; i <= 3; i++) {
Message message = messageList.get(i - 1);
assertEquals("test-message" + i, new String(message.getBytes()));
}
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 3);
for(int i = 1; i <= 3; i++) {
Message message = messageList.get(i - 1);
assertEquals("test-message" + i, new String(message.getBytes()));
}
}
@Test
public void sendMessagesToDifferentTopics() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data1 = new ProducerData<String, String>("test-topic1", "test-message1");
producer.send(data1);
ProducerData<String, String> data2 = new ProducerData<String, String>("test-topic2", "test-message2");
producer.send(data2);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic1", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer1.consume("test-topic2", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
// consume by fanoutId
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic1", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer1.consume("test-topic2", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
}
@Test
public void sendMessagesWithCustomPartitioner() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", brokerList);
props.put("partitioner.class", CustomPartitioner.class.getName());
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
// will be sent to broker 1 since (the length of key % num of brokers) = 0
ProducerData<String, String> data1 = new ProducerData<String, String>("test-topic1", "key1", "test-message1");
producer.send(data1);
// will be sent to broker 2 since (the length of key % num of brokers) = 1
ProducerData<String, String> data2 = new ProducerData<String, String>("test-topic2", "key11", "test-message2");
producer.send(data2);
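// With two brokers: "key1".length() == 4, 4 % 2 == 0 -> broker 1 (port 9092);
// "key11".length() == 5, 5 % 2 == 1 -> broker 2 (port 9093), matching the
// consumers used in the assertions below.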
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic1", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer2.consume("test-topic2", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
// consume by fanoutId
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic1", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer2.consume("test-topic2", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
}
@Test
public void sendMessageWithCustomEncoder() throws Exception {
Properties props = new Properties();
props.put("serializer.class", LogEventEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, LogEvent> producer = new Producer<String, LogEvent>(config);
LogEvent logEvent = new LogEvent();
logEvent.createdTime = System.currentTimeMillis();
logEvent.hostId = "127.0.0.1";
logEvent.logLevel = LogLevel.INFO;
logEvent.message = "a test log message";
ProducerData<String, LogEvent> data = new ProducerData<String, LogEvent>("log-topic", logEvent);
producer.send(data);
producer.close(); // finish with the producer
// consume by index
LogEventDecoder decoder = new LogEventDecoder();
List<MessageList> listOfMessageList = simpleConsumer1.consume("log-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals(logEvent, decoder.toEvent(message));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("log-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals(logEvent, decoder.toEvent(message));
}
@Test
public void consumeMessageWithDifferentFanoutId() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
for(int i = 0; i < 100; i++) {
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message" + i);
producer.send(data);
}
producer.close(); // finish with the producer
// consume by different fanout id independently
String fanoutId = "group-a";
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 100);
for(int i = 0; i < 100; i++) {
MessageList messageList = listOfMessageList.get(i);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message" + i, new String(message.getBytes()));
}
fanoutId = "group-b";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 100);
for(int i = 0; i < 100; i++) {
MessageList messageList = listOfMessageList.get(i);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message" + i, new String(message.getBytes()));
}
fanoutId = "group-c";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 100);
for(int i = 0; i < 100; i++) {
MessageList messageList = listOfMessageList.get(i);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message" + i, new String(message.getBytes()));
}
}
@After
public void cleanup() throws Exception {
server1.close();
server2.close();
simpleConsumer1.close();
simpleConsumer2.close();
Utils.deleteDirectory(new File(server1.config.getLogDir()));
Utils.deleteDirectory(new File(server2.config.getLogDir()));
Thread.sleep(500);
}
}
|
src/test/java/com/leansoft/luxun/quickstart/SimpleDemo.java
|
package com.leansoft.luxun.quickstart;
import static org.junit.Assert.*;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.leansoft.luxun.consumer.SimpleConsumer;
import com.leansoft.luxun.message.Message;
import com.leansoft.luxun.message.MessageList;
import com.leansoft.luxun.message.generated.CompressionCodec;
import com.leansoft.luxun.producer.Producer;
import com.leansoft.luxun.producer.ProducerConfig;
import com.leansoft.luxun.producer.ProducerData;
import com.leansoft.luxun.serializer.StringEncoder;
import com.leansoft.luxun.server.LuxunServer;
import com.leansoft.luxun.server.ServerConfig;
import com.leansoft.luxun.utils.TestUtils;
import com.leansoft.luxun.utils.Utils;
public class SimpleDemo {
private int brokerId1 = 0;
private int brokerId2 = 1;
private int port1 = 9092;
private int port2 = 9093;
private LuxunServer server1 = null;
private LuxunServer server2 = null;
private String brokerList = brokerId1 + ":localhost:" + port1 + "," + brokerId2 + ":localhost:" + port2;
private String broker1 = brokerId1 + ":localhost:" + port1;
private SimpleConsumer simpleConsumer1 = null;
private SimpleConsumer simpleConsumer2 = null;
@Before
public void setup() {
// set up 2 brokers
Properties props1 = new Properties();
props1.put("brokerid", String.valueOf(brokerId1));
props1.put("port", String.valueOf(port1));
props1.put("log.dir", TestUtils.createTempDir().getAbsolutePath());
ServerConfig config1 = new ServerConfig(props1);
server1 = new LuxunServer(config1);
server1.startup();
Properties props2 = new Properties();
props2.put("brokerid", String.valueOf(brokerId2));
props2.put("port", String.valueOf(port2));
props2.put("log.dir", TestUtils.createTempDir().getAbsolutePath());
ServerConfig config2 = new ServerConfig(props2);
server2 = new LuxunServer(config2);
server2.startup();
// set up two simple consumers
// create a consumer 1 to connect to the Luxun server running on localhost, port 9092, socket timeout of 60 secs
simpleConsumer1 = new SimpleConsumer("localhost", port1, 60000);
// create a consumer 2 to connect to the Luxun server running on localhost, port 9093, socket timeout of 60 secs
simpleConsumer2 = new SimpleConsumer("localhost", port2, 60000);
}
@Test
public void sendSingleMessage() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message");
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
}
@Test
public void sendMessageWithGZIPCompression() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
props.put("compression.codec", "1");
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message");
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertEquals(CompressionCodec.GZIP, messageList.getCompressionCodec());
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertEquals(CompressionCodec.GZIP, messageList.getCompressionCodec());
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
}
@Test
public void sendMessageWithAsyncProducer() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("producer.type", "async");
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", "test-message");
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message", new String(message.getBytes()));
}
@Test
public void sendMultipleMessages() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
List<String> messages = new ArrayList<String>();
messages.add("test-message1");
messages.add("test-message2");
messages.add("test-message3");
ProducerData<String, String> data = new ProducerData<String, String>("test-topic", messages);
producer.send(data);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 3);
for(int i = 1; i <= 3; i++) {
Message message = messageList.get(i - 1);
assertEquals("test-message" + i, new String(message.getBytes()));
}
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 3);
for(int i = 1; i <= 3; i++) {
Message message = messageList.get(i - 1);
assertEquals("test-message" + i, new String(message.getBytes()));
}
}
@Test
public void sendMessagesToDifferentTopics() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
ProducerData<String, String> data1 = new ProducerData<String, String>("test-topic1", "test-message1");
producer.send(data1);
ProducerData<String, String> data2 = new ProducerData<String, String>("test-topic2", "test-message2");
producer.send(data2);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic1", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer1.consume("test-topic2", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
// consume by fanoutId
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic1", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer1.consume("test-topic2", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
}
@Test
public void sendMessagesWithCustomPartitioner() throws Exception {
Properties props = new Properties();
props.put("serializer.class", StringEncoder.class.getName());
props.put("broker.list", brokerList);
props.put("partitioner.class", CustomPartitioner.class.getName());
ProducerConfig config = new ProducerConfig(props);
Producer<String, String> producer = new Producer<String, String>(config);
// will be sent to broker 1 since (the length of key % num of brokers) = 0
ProducerData<String, String> data1 = new ProducerData<String, String>("test-topic1", "key1", "test-message1");
producer.send(data1);
// will be sent to broker 2 since (the length of key % num of brokers) = 1
ProducerData<String, String> data2 = new ProducerData<String, String>("test-topic2", "key11", "test-message2");
producer.send(data2);
producer.close(); // finish with the producer
// consume by index
List<MessageList> listOfMessageList = simpleConsumer1.consume("test-topic1", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer2.consume("test-topic2", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
// consume by fanoutId
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("test-topic1", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message1", new String(message.getBytes()));
listOfMessageList = simpleConsumer2.consume("test-topic2", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals("test-message2", new String(message.getBytes()));
}
@Test
public void sendMessageWithCustomEncoder() throws Exception {
Properties props = new Properties();
props.put("serializer.class", LogEventEncoder.class.getName());
props.put("broker.list", broker1);
ProducerConfig config = new ProducerConfig(props);
Producer<String, LogEvent> producer = new Producer<String, LogEvent>(config);
LogEvent logEvent = new LogEvent();
logEvent.createdTime = System.currentTimeMillis();
logEvent.hostId = "127.0.0.1";
logEvent.logLevel = LogLevel.INFO;
logEvent.message = "a test log message";
ProducerData<String, LogEvent> data = new ProducerData<String, LogEvent>("log-topic", logEvent);
producer.send(data);
producer.close(); // finish with the producer
// consume by index
LogEventDecoder decoder = new LogEventDecoder();
List<MessageList> listOfMessageList = simpleConsumer1.consume("log-topic", 0, 10000);
assertTrue(listOfMessageList.size() == 1);
MessageList messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
Message message = messageList.get(0);
assertEquals(logEvent, decoder.toEvent(message));
// consume by fanout id
String fanoutId = "demo";
listOfMessageList = simpleConsumer1.consume("log-topic", fanoutId, 10000);
assertTrue(listOfMessageList.size() == 1);
messageList = listOfMessageList.get(0);
assertTrue(messageList.size() == 1);
message = messageList.get(0);
assertEquals(logEvent, decoder.toEvent(message));
}
@After
public void cleanup() throws Exception {
server1.close();
server2.close();
simpleConsumer1.close();
simpleConsumer2.close();
Utils.deleteDirectory(new File(server1.config.getLogDir()));
Utils.deleteDirectory(new File(server2.config.getLogDir()));
Thread.sleep(500);
}
}
|
update demo to add consuming by multiple fanout ids
|
src/test/java/com/leansoft/luxun/quickstart/SimpleDemo.java
|
update demo to add consuming by multiple fanout ids
|
|
Java
|
apache-2.0
|
ce407196494eef30530707c08b7e9120518ee80c
| 0
|
d-ellebasi/ietfsched,d-ellebasi/ietfsched,d-ellebasi/ietfsched
|
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ietf.ietfsched.ui;
import org.ietf.ietfsched.R;
import org.ietf.ietfsched.provider.ScheduleContract;
import org.ietf.ietfsched.ui.widget.BlockView;
import org.ietf.ietfsched.ui.widget.BlocksLayout;
import org.ietf.ietfsched.ui.widget.ObservableScrollView;
import org.ietf.ietfsched.ui.widget.Workspace;
// import org.ietf.ietfsched.util.AnalyticsUtils;
import org.ietf.ietfsched.util.Maps;
import org.ietf.ietfsched.util.MotionEventUtils;
import org.ietf.ietfsched.util.NotifyingAsyncQueryHandler;
import org.ietf.ietfsched.util.ParserUtils;
import org.ietf.ietfsched.util.UIUtils;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.ContentObserver;
import android.database.Cursor;
import android.graphics.Rect;
import android.graphics.drawable.LayerDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.BaseColumns;
import android.support.v4.app.Fragment;
import android.text.format.DateUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.TimeZone;
/**
* Shows a horizontally-pageable calendar of conference days. Horizontal paging is achieved using
* {@link Workspace}, and the primary UI classes for rendering the calendar are
* {@link org.ietf.ietfsched.ui.widget.TimeRulerView},
* {@link BlocksLayout}, and {@link BlockView}.
*/
public class ScheduleFragment extends Fragment implements
NotifyingAsyncQueryHandler.AsyncQueryListener,
ObservableScrollView.OnScrollListener,
View.OnClickListener {
private static final String TAG = "ScheduleFragment";
private static final boolean debbug = true;
/**
* Flags used with {@link android.text.format.DateUtils#formatDateRange}.
*/
private static final int TIME_FLAGS = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_WEEKDAY | DateUtils.FORMAT_ABBREV_WEEKDAY;
private static final long[] START_DAYS = new long[] {
/* ParserUtils defines the time format:
* df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:00.000", Locale.US);
*/
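// Seven entries, one per conference day (2018-07-14 through 2018-07-20),
// each starting at 07:00.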
ParserUtils.parseTime("2018-07-14T07:00:00.000"),
ParserUtils.parseTime("2018-07-15T07:00:00.000"),
ParserUtils.parseTime("2018-07-16T07:00:00.000"),
ParserUtils.parseTime("2018-07-17T07:00:00.000"),
ParserUtils.parseTime("2018-07-18T07:00:00.000"),
ParserUtils.parseTime("2018-07-19T07:00:00.000"),
ParserUtils.parseTime("2018-07-20T07:00:00.000")
};
private static final int DISABLED_BLOCK_ALPHA = 100;
private static final HashMap<String, Integer> sTypeColumnMap = buildTypeColumnMap();
// TODO: show blocks that don't fall into columns at the bottom
// public static final String EXTRA_TIME_START = "org.ietf.ietfsched.extra.TIME_START";
// public static final String EXTRA_TIME_END = "org.ietf.ietfsched.extra.TIME_END";
private NotifyingAsyncQueryHandler mHandler;
private Workspace mWorkspace;
private TextView mTitle;
private int mTitleCurrentDayIndex = -1;
private View mLeftIndicator;
private View mRightIndicator;
/**
* A helper class containing object references related to a particular day in the schedule.
*/
private class Day {
private ViewGroup rootView;
private ObservableScrollView scrollView;
private View nowView;
private BlocksLayout blocksView;
private int index = -1;
private String label = null;
private Uri blocksUri = null;
private long timeStart = -1;
private long timeEnd = -1;
}
private List<Day> mDays = new ArrayList<>();
private static HashMap<String, Integer> buildTypeColumnMap() {
final HashMap<String, Integer> map = Maps.newHashMap();
map.put(ParserUtils.BLOCK_TYPE_FOOD, 0);
map.put(ParserUtils.BLOCK_TYPE_SESSION, 1);
map.put(ParserUtils.BLOCK_TYPE_OFFICE_HOURS, 2);
return map;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mHandler = new NotifyingAsyncQueryHandler(getActivity().getContentResolver(), this);
setHasOptionsMenu(true);
// AnalyticsUtils.getInstance(getActivity()).trackPageView("/Schedule");
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
ViewGroup root = (ViewGroup) inflater.inflate(R.layout.fragment_schedule, container, false);
mWorkspace = (Workspace) root.findViewById(R.id.workspace);
mTitle = (TextView) root.findViewById(R.id.block_title);
mLeftIndicator = root.findViewById(R.id.indicator_left);
mLeftIndicator.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View view, MotionEvent motionEvent) {
if ((motionEvent.getAction() & MotionEventUtils.ACTION_MASK)
== MotionEvent.ACTION_DOWN) {
mWorkspace.scrollLeft();
return true;
}
return false;
}
});
mLeftIndicator.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
mWorkspace.scrollLeft();
}
});
mRightIndicator = root.findViewById(R.id.indicator_right);
mRightIndicator.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View view, MotionEvent motionEvent) {
if ((motionEvent.getAction() & MotionEventUtils.ACTION_MASK)
== MotionEvent.ACTION_DOWN) {
mWorkspace.scrollRight();
return true;
}
return false;
}
});
mRightIndicator.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
mWorkspace.scrollRight();
}
});
for (long day : START_DAYS) {
setupDay(inflater, day);
}
// setupDay(inflater, TUE_START);
// setupDay(inflater, WED_START);
updateWorkspaceHeader(0);
mWorkspace.setOnScrollListener(new Workspace.OnScrollListener() {
public void onScroll(float screenFraction) {
updateWorkspaceHeader(Math.round(screenFraction));
}
}, true);
return root;
}
public void updateWorkspaceHeader(int dayIndex) {
if (mTitleCurrentDayIndex == dayIndex) {
return;
}
mTitleCurrentDayIndex = dayIndex;
Day day = mDays.get(dayIndex);
mTitle.setText(day.label);
mLeftIndicator
.setVisibility((dayIndex != 0) ? View.VISIBLE : View.INVISIBLE);
mRightIndicator
.setVisibility((dayIndex < mDays.size() - 1) ? View.VISIBLE : View.INVISIBLE);
}
private void setupDay(LayoutInflater inflater, long startMillis) {
Day day = new Day();
if (debbug) Log.d(TAG, "Setup day");
// Setup data
day.index = mDays.size();
day.timeStart = startMillis;
day.timeEnd = startMillis + DateUtils.DAY_IN_MILLIS;
day.blocksUri = ScheduleContract.Blocks.buildBlocksBetweenDirUri(
day.timeStart, day.timeEnd);
if (debbug) Log.d(TAG, "day block uri " + day.blocksUri);
// Setup views
day.rootView = (ViewGroup) inflater.inflate(R.layout.blocks_content, null);
day.scrollView = (ObservableScrollView) day.rootView.findViewById(R.id.blocks_scroll);
day.scrollView.setOnScrollListener(this);
day.blocksView = (BlocksLayout) day.rootView.findViewById(R.id.blocks);
day.nowView = day.rootView.findViewById(R.id.blocks_now);
day.blocksView.setDrawingCacheEnabled(true);
day.blocksView.setAlwaysDrawnWithCacheEnabled(true);
TimeZone.setDefault(UIUtils.CONFERENCE_TIME_ZONE);
day.label = DateUtils.formatDateTime(getActivity(), startMillis, TIME_FLAGS);
mWorkspace.addView(day.rootView);
mDays.add(day);
}
@Override
public void onResume() {
super.onResume();
// Since we build our views manually instead of using an adapter, we
// need to manually requery every time launched.
requery();
getActivity().getContentResolver().registerContentObserver(
ScheduleContract.Sessions.CONTENT_URI, true, mSessionChangesObserver);
// Start listening for time updates to adjust "now" bar. TIME_TICK is
// triggered once per minute, which is how we move the bar over time.
final IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_TIME_TICK);
filter.addAction(Intent.ACTION_TIME_CHANGED);
filter.addAction(Intent.ACTION_TIMEZONE_CHANGED);
getActivity().registerReceiver(mReceiver, filter, null, new Handler());
}
private void requery() {
for (Day day : mDays) {
mHandler.startQuery(0, day, day.blocksUri, BlocksQuery.PROJECTION,
null, null, ScheduleContract.Blocks.DEFAULT_SORT);
}
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
getActivity().runOnUiThread(new Runnable() {
public void run() {
updateNowView(true);
}
});
}
@Override
public void onPause() {
super.onPause();
getActivity().unregisterReceiver(mReceiver);
getActivity().getContentResolver().unregisterContentObserver(mSessionChangesObserver);
}
/**
* {@inheritDoc}
*/
public void onQueryComplete(int token, Object cookie, Cursor cursor) {
// Log.d(TAG, "onQueryComplete cursor " + cursor + "activity " + getActivity() + "count rows " + cursor.getCount());
if (getActivity() == null) {
return;
}
Day day = (Day) cookie;
// Clear out any existing sessions before inserting again
day.blocksView.removeAllBlocks();
try {
while (cursor.moveToNext()) {
final String type = cursor.getString(BlocksQuery.BLOCK_TYPE);
final Integer column = sTypeColumnMap.get(type);
// TODO: place random blocks at bottom of entire layout
if (column == null) {
continue;
}
final String blockId = cursor.getString(BlocksQuery.BLOCK_ID);
final String title = cursor.getString(BlocksQuery.BLOCK_TITLE);
final long start = cursor.getLong(BlocksQuery.BLOCK_START);
final long end = cursor.getLong(BlocksQuery.BLOCK_END);
final boolean containsStarred = cursor.getInt(BlocksQuery.CONTAINS_STARRED) != 0;
final BlockView blockView = new BlockView(getActivity(), blockId, title, start, end,
containsStarred, column);
final int sessionsCount = cursor.getInt(BlocksQuery.SESSIONS_COUNT);
if (sessionsCount > 0) {
blockView.setOnClickListener(this);
} else {
blockView.setFocusable(false);
blockView.setEnabled(false);
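// Dim the block by lowering the alpha of two layers of its background
// LayerDrawable, signalling visually that it contains no sessions.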
LayerDrawable buttonDrawable = (LayerDrawable) blockView.getBackground();
buttonDrawable.getDrawable(0).setAlpha(DISABLED_BLOCK_ALPHA);
buttonDrawable.getDrawable(2).setAlpha(DISABLED_BLOCK_ALPHA);
}
day.blocksView.addBlock(blockView);
}
} finally {
cursor.close();
}
}
/** {@inheritDoc} */
public void onClick(View view) {
if (view instanceof BlockView) {
// AnalyticsUtils.getInstance(getActivity()).trackEvent(
// "Schedule", "Session Click", title, 0);
final String blockId = ((BlockView) view).getBlockId();
final Uri sessionsUri = ScheduleContract.Blocks.buildSessionsUri(blockId);
final Intent intent = new Intent(Intent.ACTION_VIEW, sessionsUri);
intent.putExtra(SessionsFragment.EXTRA_SCHEDULE_TIME_STRING,
((BlockView) view).getBlockTimeString());
((BaseActivity) getActivity()).openActivityOrFragment(intent);
}
}
/**
* Update position and visibility of "now" view.
*/
private boolean updateNowView(boolean forceScroll) {
final long now = UIUtils.getCurrentTime(getActivity());
Day nowDay = null; // effectively Day corresponding to today
for (Day day : mDays) {
if (now >= day.timeStart && now <= day.timeEnd) {
nowDay = day;
day.nowView.setVisibility(View.VISIBLE);
} else {
day.nowView.setVisibility(View.GONE);
}
}
if (nowDay != null && forceScroll) {
// Scroll to show "now" in center
mWorkspace.setCurrentScreen(nowDay.index);
final int offset = nowDay.scrollView.getHeight() / 2;
nowDay.nowView.requestRectangleOnScreen(new Rect(0, offset, 0, offset), true);
nowDay.blocksView.requestLayout();
return true;
}
return false;
}
public void onScrollChanged(ObservableScrollView view) {
// Keep each day view at the same vertical scroll offset.
final int scrollY = view.getScrollY();
for (Day day : mDays) {
if (day.scrollView != view) {
day.scrollView.scrollTo(0, scrollY);
}
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.schedule_menu_items, menu);
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.menu_now) {
if (!updateNowView(true)) {
Toast.makeText(getActivity(), R.string.toast_now_not_visible,
Toast.LENGTH_SHORT).show();
}
return true;
}
return super.onOptionsItemSelected(item);
}
private ContentObserver mSessionChangesObserver = new ContentObserver(new Handler()) {
@Override
public void onChange(boolean selfChange) {
requery();
}
};
private BroadcastReceiver mReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
// Log.d(TAG, "onReceive time update");
updateNowView(false);
}
};
private interface BlocksQuery {
String[] PROJECTION = {
BaseColumns._ID,
ScheduleContract.Blocks.BLOCK_ID,
ScheduleContract.Blocks.BLOCK_TITLE,
ScheduleContract.Blocks.BLOCK_START,
ScheduleContract.Blocks.BLOCK_END,
ScheduleContract.Blocks.BLOCK_TYPE,
ScheduleContract.Blocks.SESSIONS_COUNT,
ScheduleContract.Blocks.CONTAINS_STARRED,
};
int _ID = 0;
int BLOCK_ID = 1;
int BLOCK_TITLE = 2;
int BLOCK_START = 3;
int BLOCK_END = 4;
int BLOCK_TYPE = 5;
int SESSIONS_COUNT = 6;
int CONTAINS_STARRED = 7;
}
}
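// Illustrative sketch (not part of the original file): the requery()/onQueryComplete() pair
// above relies on the cookie argument of the async query API to route each result back to its
// Day. The same pattern with the stock android.content.AsyncQueryHandler looks roughly like
// the class below; CookieQueryPatternSketch, DayInfo and queryAll are made-up names standing
// in for the fragment's NotifyingAsyncQueryHandler, its private Day class and its loop over mDays.
class CookieQueryPatternSketch extends android.content.AsyncQueryHandler {

    CookieQueryPatternSketch(android.content.ContentResolver resolver) {
        super(resolver);
    }

    // One asynchronous query per day, each tagged with its own cookie object.
    void queryAll(java.util.List<DayInfo> days) {
        for (DayInfo day : days) {
            startQuery(0 /* token */, day /* cookie */, day.blocksUri,
                    null /* projection */, null /* selection */, null /* selectionArgs */,
                    null /* sortOrder */);
        }
    }

    @Override
    protected void onQueryComplete(int token, Object cookie, android.database.Cursor cursor) {
        // The cookie handed to startQuery() comes back untouched, so no extra bookkeeping
        // is needed to know which day's views should be rebuilt from this cursor.
        DayInfo day = (DayInfo) cookie;
        if (cursor == null) {
            return;
        }
        try {
            android.util.Log.d("CookieQuerySketch", day.blocksUri + " returned "
                    + cursor.getCount() + " rows");
            // ... bind cursor rows to views owned by 'day' here ...
        } finally {
            cursor.close();
        }
    }

    // Stand-in for the fragment's private Day holder.
    static class DayInfo {
        android.net.Uri blocksUri;
    }
}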
|
app/src/main/java/org/ietf/ietfsched/ui/ScheduleFragment.java
|
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ietf.ietfsched.ui;
import org.ietf.ietfsched.R;
import org.ietf.ietfsched.provider.ScheduleContract;
import org.ietf.ietfsched.ui.widget.BlockView;
import org.ietf.ietfsched.ui.widget.BlocksLayout;
import org.ietf.ietfsched.ui.widget.ObservableScrollView;
import org.ietf.ietfsched.ui.widget.Workspace;
// import org.ietf.ietfsched.util.AnalyticsUtils;
import org.ietf.ietfsched.util.Maps;
import org.ietf.ietfsched.util.MotionEventUtils;
import org.ietf.ietfsched.util.NotifyingAsyncQueryHandler;
import org.ietf.ietfsched.util.ParserUtils;
import org.ietf.ietfsched.util.UIUtils;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.ContentObserver;
import android.database.Cursor;
import android.graphics.Rect;
import android.graphics.drawable.LayerDrawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.BaseColumns;
import android.support.v4.app.Fragment;
import android.text.format.DateUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.TimeZone;
/**
* Shows a horizontally-pageable calendar of conference days. Horizontal paging is achieved using
* {@link Workspace}, and the primary UI classes for rendering the calendar are
* {@link org.ietf.ietfsched.ui.widget.TimeRulerView},
* {@link BlocksLayout}, and {@link BlockView}.
*/
public class ScheduleFragment extends Fragment implements
NotifyingAsyncQueryHandler.AsyncQueryListener,
ObservableScrollView.OnScrollListener,
View.OnClickListener {
private static final String TAG = "ScheduleFragment";
private static final boolean debug = true;
/**
* Flags used with {@link android.text.format.DateUtils#formatDateRange}.
*/
private static final int TIME_FLAGS = DateUtils.FORMAT_SHOW_DATE | DateUtils.FORMAT_SHOW_WEEKDAY | DateUtils.FORMAT_ABBREV_WEEKDAY;
private static final long[] START_DAYS = new long[] {
/* ParserUtils defines the time format:
* df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:00.000", Locale.US);
*/
ParserUtils.parseTime("2018-03-17T07:00:00.000"),
ParserUtils.parseTime("2018-03-18T07:00:00.000"),
ParserUtils.parseTime("2018-03-19T07:00:00.000"),
ParserUtils.parseTime("2018-03-20T07:00:00.000"),
ParserUtils.parseTime("2018-03-21T07:00:00.000"),
ParserUtils.parseTime("2018-03-22T07:00:00.000"),
ParserUtils.parseTime("2018-03-23T07:00:00.000")
};
private static final int DISABLED_BLOCK_ALPHA = 100;
private static final HashMap<String, Integer> sTypeColumnMap = buildTypeColumnMap();
// TODO: show blocks that don't fall into columns at the bottom
// public static final String EXTRA_TIME_START = "org.ietf.ietfsched.extra.TIME_START";
// public static final String EXTRA_TIME_END = "org.ietf.ietfsched.extra.TIME_END";
private NotifyingAsyncQueryHandler mHandler;
private Workspace mWorkspace;
private TextView mTitle;
private int mTitleCurrentDayIndex = -1;
private View mLeftIndicator;
private View mRightIndicator;
/**
* A helper class containing object references related to a particular day in the schedule.
*/
private class Day {
private ViewGroup rootView;
private ObservableScrollView scrollView;
private View nowView;
private BlocksLayout blocksView;
private int index = -1;
private String label = null;
private Uri blocksUri = null;
private long timeStart = -1;
private long timeEnd = -1;
}
private List<Day> mDays = new ArrayList<>();
private static HashMap<String, Integer> buildTypeColumnMap() {
final HashMap<String, Integer> map = Maps.newHashMap();
map.put(ParserUtils.BLOCK_TYPE_FOOD, 0);
map.put(ParserUtils.BLOCK_TYPE_SESSION, 1);
map.put(ParserUtils.BLOCK_TYPE_OFFICE_HOURS, 2);
return map;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mHandler = new NotifyingAsyncQueryHandler(getActivity().getContentResolver(), this);
setHasOptionsMenu(true);
// AnalyticsUtils.getInstance(getActivity()).trackPageView("/Schedule");
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
ViewGroup root = (ViewGroup) inflater.inflate(R.layout.fragment_schedule, container, false);
mWorkspace = (Workspace) root.findViewById(R.id.workspace);
mTitle = (TextView) root.findViewById(R.id.block_title);
mLeftIndicator = root.findViewById(R.id.indicator_left);
mLeftIndicator.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View view, MotionEvent motionEvent) {
if ((motionEvent.getAction() & MotionEventUtils.ACTION_MASK)
== MotionEvent.ACTION_DOWN) {
mWorkspace.scrollLeft();
return true;
}
return false;
}
});
mLeftIndicator.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
mWorkspace.scrollLeft();
}
});
mRightIndicator = root.findViewById(R.id.indicator_right);
mRightIndicator.setOnTouchListener(new View.OnTouchListener() {
public boolean onTouch(View view, MotionEvent motionEvent) {
if ((motionEvent.getAction() & MotionEventUtils.ACTION_MASK)
== MotionEvent.ACTION_DOWN) {
mWorkspace.scrollRight();
return true;
}
return false;
}
});
mRightIndicator.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
mWorkspace.scrollRight();
}
});
for (long day : START_DAYS) {
setupDay(inflater, day);
}
// setupDay(inflater, TUE_START);
// setupDay(inflater, WED_START);
updateWorkspaceHeader(0);
mWorkspace.setOnScrollListener(new Workspace.OnScrollListener() {
public void onScroll(float screenFraction) {
updateWorkspaceHeader(Math.round(screenFraction));
}
}, true);
return root;
}
public void updateWorkspaceHeader(int dayIndex) {
if (mTitleCurrentDayIndex == dayIndex) {
return;
}
mTitleCurrentDayIndex = dayIndex;
Day day = mDays.get(dayIndex);
mTitle.setText(day.label);
mLeftIndicator
.setVisibility((dayIndex != 0) ? View.VISIBLE : View.INVISIBLE);
mRightIndicator
.setVisibility((dayIndex < mDays.size() - 1) ? View.VISIBLE : View.INVISIBLE);
}
private void setupDay(LayoutInflater inflater, long startMillis) {
Day day = new Day();
if (debbug) Log.d(TAG, "Setup day");
// Setup data
day.index = mDays.size();
day.timeStart = startMillis;
day.timeEnd = startMillis + DateUtils.DAY_IN_MILLIS;
day.blocksUri = ScheduleContract.Blocks.buildBlocksBetweenDirUri(
day.timeStart, day.timeEnd);
if (debbug) Log.d(TAG, "day block uri " + day.blocksUri);
// Setup views
day.rootView = (ViewGroup) inflater.inflate(R.layout.blocks_content, null);
day.scrollView = (ObservableScrollView) day.rootView.findViewById(R.id.blocks_scroll);
day.scrollView.setOnScrollListener(this);
day.blocksView = (BlocksLayout) day.rootView.findViewById(R.id.blocks);
day.nowView = day.rootView.findViewById(R.id.blocks_now);
day.blocksView.setDrawingCacheEnabled(true);
day.blocksView.setAlwaysDrawnWithCacheEnabled(true);
TimeZone.setDefault(UIUtils.CONFERENCE_TIME_ZONE);
day.label = DateUtils.formatDateTime(getActivity(), startMillis, TIME_FLAGS);
mWorkspace.addView(day.rootView);
mDays.add(day);
}
@Override
public void onResume() {
super.onResume();
// Since we build our views manually instead of using an adapter, we
// need to manually requery every time we're launched.
requery();
getActivity().getContentResolver().registerContentObserver(
ScheduleContract.Sessions.CONTENT_URI, true, mSessionChangesObserver);
// Start listening for time updates to adjust "now" bar. TIME_TICK is
// triggered once per minute, which is how we move the bar over time.
final IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_TIME_TICK);
filter.addAction(Intent.ACTION_TIME_CHANGED);
filter.addAction(Intent.ACTION_TIMEZONE_CHANGED);
getActivity().registerReceiver(mReceiver, filter, null, new Handler());
}
private void requery() {
for (Day day : mDays) {
mHandler.startQuery(0, day, day.blocksUri, BlocksQuery.PROJECTION,
null, null, ScheduleContract.Blocks.DEFAULT_SORT);
}
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
getActivity().runOnUiThread(new Runnable() {
public void run() {
updateNowView(true);
}
});
}
@Override
public void onPause() {
super.onPause();
getActivity().unregisterReceiver(mReceiver);
getActivity().getContentResolver().unregisterContentObserver(mSessionChangesObserver);
}
/**
* {@inheritDoc}
*/
public void onQueryComplete(int token, Object cookie, Cursor cursor) {
// Log.d(TAG, "onQueryComplete cursor " + cursor + "activity " + getActivity() + "count rows " + cursor.getCount());
if (getActivity() == null) {
return;
}
Day day = (Day) cookie;
// Clear out any existing sessions before inserting again
day.blocksView.removeAllBlocks();
try {
while (cursor.moveToNext()) {
final String type = cursor.getString(BlocksQuery.BLOCK_TYPE);
final Integer column = sTypeColumnMap.get(type);
// TODO: place random blocks at bottom of entire layout
if (column == null) {
continue;
}
final String blockId = cursor.getString(BlocksQuery.BLOCK_ID);
final String title = cursor.getString(BlocksQuery.BLOCK_TITLE);
final long start = cursor.getLong(BlocksQuery.BLOCK_START);
final long end = cursor.getLong(BlocksQuery.BLOCK_END);
final boolean containsStarred = cursor.getInt(BlocksQuery.CONTAINS_STARRED) != 0;
final BlockView blockView = new BlockView(getActivity(), blockId, title, start, end,
containsStarred, column);
final int sessionsCount = cursor.getInt(BlocksQuery.SESSIONS_COUNT);
if (sessionsCount > 0) {
blockView.setOnClickListener(this);
} else {
blockView.setFocusable(false);
blockView.setEnabled(false);
LayerDrawable buttonDrawable = (LayerDrawable) blockView.getBackground();
buttonDrawable.getDrawable(0).setAlpha(DISABLED_BLOCK_ALPHA);
buttonDrawable.getDrawable(2).setAlpha(DISABLED_BLOCK_ALPHA);
}
day.blocksView.addBlock(blockView);
}
} finally {
cursor.close();
}
}
/** {@inheritDoc} */
public void onClick(View view) {
if (view instanceof BlockView) {
// AnalyticsUtils.getInstance(getActivity()).trackEvent(
// "Schedule", "Session Click", title, 0);
final String blockId = ((BlockView) view).getBlockId();
final Uri sessionsUri = ScheduleContract.Blocks.buildSessionsUri(blockId);
final Intent intent = new Intent(Intent.ACTION_VIEW, sessionsUri);
intent.putExtra(SessionsFragment.EXTRA_SCHEDULE_TIME_STRING,
((BlockView) view).getBlockTimeString());
((BaseActivity) getActivity()).openActivityOrFragment(intent);
}
}
/**
* Update position and visibility of "now" view.
*/
private boolean updateNowView(boolean forceScroll) {
final long now = UIUtils.getCurrentTime(getActivity());
Day nowDay = null; // effectively Day corresponding to today
for (Day day : mDays) {
if (now >= day.timeStart && now <= day.timeEnd) {
nowDay = day;
day.nowView.setVisibility(View.VISIBLE);
} else {
day.nowView.setVisibility(View.GONE);
}
}
if (nowDay != null && forceScroll) {
// Scroll to show "now" in center
mWorkspace.setCurrentScreen(nowDay.index);
final int offset = nowDay.scrollView.getHeight() / 2;
nowDay.nowView.requestRectangleOnScreen(new Rect(0, offset, 0, offset), true);
nowDay.blocksView.requestLayout();
return true;
}
return false;
}
public void onScrollChanged(ObservableScrollView view) {
// Keep each day view at the same vertical scroll offset.
final int scrollY = view.getScrollY();
for (Day day : mDays) {
if (day.scrollView != view) {
day.scrollView.scrollTo(0, scrollY);
}
}
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.schedule_menu_items, menu);
super.onCreateOptionsMenu(menu, inflater);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.menu_now) {
if (!updateNowView(true)) {
Toast.makeText(getActivity(), R.string.toast_now_not_visible,
Toast.LENGTH_SHORT).show();
}
return true;
}
return super.onOptionsItemSelected(item);
}
private ContentObserver mSessionChangesObserver = new ContentObserver(new Handler()) {
@Override
public void onChange(boolean selfChange) {
requery();
}
};
private BroadcastReceiver mReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
// Log.d(TAG, "onReceive time update");
updateNowView(false);
}
};
private interface BlocksQuery {
String[] PROJECTION = {
BaseColumns._ID,
ScheduleContract.Blocks.BLOCK_ID,
ScheduleContract.Blocks.BLOCK_TITLE,
ScheduleContract.Blocks.BLOCK_START,
ScheduleContract.Blocks.BLOCK_END,
ScheduleContract.Blocks.BLOCK_TYPE,
ScheduleContract.Blocks.SESSIONS_COUNT,
ScheduleContract.Blocks.CONTAINS_STARRED,
};
int _ID = 0;
int BLOCK_ID = 1;
int BLOCK_TITLE = 2;
int BLOCK_START = 3;
int BLOCK_END = 4;
int BLOCK_TYPE = 5;
int SESSIONS_COUNT = 6;
int CONTAINS_STARRED = 7;
}
}
|
Update for IETF102 - Montreal.
July 14-20.
|
app/src/main/java/org/ietf/ietfsched/ui/ScheduleFragment.java
|
Update for IETF102 - Montreal. July 14-20.
|
|
Java
|
apache-2.0
|
e88d1ee11c0f2df00da7e8c633c55897212dbf89
| 0
|
leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere
|
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.transport.mysql.packet.command.query.binary.execute.protocol;
import io.shardingsphere.proxy.transport.mysql.packet.MySQLPacketPayload;
import java.sql.Timestamp;
import java.util.Calendar;
/**
* Binary protocol value for date.
*
* @author zhangyonglun
* @author zhangliang
*/
public final class DateBinaryProtocolValue implements BinaryProtocolValue {
@Override
public Object read(final MySQLPacketPayload payload) {
int length = payload.readInt1();
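        // MySQL's binary protocol length-prefixes temporal values: 0 means the zero value,
        // 4 carries the date only (year, month, day), 7 adds the time fields, and 11 adds a
        // trailing 4-byte fractional-second part on top of that.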
switch (length) {
case 0:
return new Timestamp(0L);
case 4:
return getTimestampForDate(payload);
case 7:
return getTimestampForDatetime(payload);
case 11:
Timestamp result = getTimestampForDatetime(payload);
result.setNanos(payload.readInt4());
return result;
default:
throw new IllegalArgumentException(String.format("Wrong length '%d' of MYSQL_TYPE_TIME", length));
}
}
private Timestamp getTimestampForDate(final MySQLPacketPayload payload) {
Calendar result = Calendar.getInstance();
result.set(payload.readInt2(), payload.readInt1() - 1, payload.readInt1());
return new Timestamp(result.getTimeInMillis());
}
private Timestamp getTimestampForDatetime(final MySQLPacketPayload payload) {
Calendar result = Calendar.getInstance();
result.set(payload.readInt2(), payload.readInt1() - 1, payload.readInt1(), payload.readInt1(), payload.readInt1(), payload.readInt1());
return new Timestamp(result.getTimeInMillis());
}
@Override
public void write(final MySQLPacketPayload payload, final Object value) {
// TODO :yonglun confirm that YEAR cannot be set to 0 here; in this code path it is at least 1970
Timestamp timestamp = (Timestamp) value;
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(timestamp.getTime());
int year = calendar.get(Calendar.YEAR);
// TODO :yonglun confirm the month + 1 adjustment; isDateAbsent then checks 0 == month, which can never be true. Is that intended?
int month = calendar.get(Calendar.MONTH) + 1;
int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH);
int hourOfDay = calendar.get(Calendar.HOUR_OF_DAY);
int minutes = calendar.get(Calendar.MINUTE);
int seconds = calendar.get(Calendar.SECOND);
int nanos = timestamp.getNanos();
boolean isDateAbsent = 0 == year && 0 == month && 0 == dayOfMonth;
boolean isTimeAbsent = 0 == hourOfDay && 0 == minutes && 0 == seconds;
boolean isNanosAbsent = 0 == nanos;
if (isDateAbsent && isTimeAbsent && isNanosAbsent) {
payload.writeInt1(0);
return;
}
if (isTimeAbsent && isNanosAbsent) {
payload.writeInt1(4);
writeDate(payload, year, month, dayOfMonth);
return;
}
if (isNanosAbsent) {
payload.writeInt1(7);
writeDate(payload, year, month, dayOfMonth);
writeTime(payload, hourOfDay, minutes, seconds);
return;
}
payload.writeInt1(11);
writeDate(payload, year, month, dayOfMonth);
writeTime(payload, hourOfDay, minutes, seconds);
writeNanos(payload, nanos);
}
private void writeDate(final MySQLPacketPayload payload, final int year, final int month, final int dayOfMonth) {
payload.writeInt2(year);
payload.writeInt1(month);
payload.writeInt1(dayOfMonth);
}
private void writeTime(final MySQLPacketPayload payload, final int hourOfDay, final int minutes, final int seconds) {
payload.writeInt1(hourOfDay);
payload.writeInt1(minutes);
payload.writeInt1(seconds);
}
private void writeNanos(final MySQLPacketPayload payload, final int nanos) {
payload.writeInt4(nanos);
}
}
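// Standalone sketch (not part of the original file): a worked example of how write() above
// chooses the length prefix. It repeats the isDateAbsent/isTimeAbsent/isNanosAbsent branching
// with plain JDK types only, so the resulting lengths can be checked without a
// MySQLPacketPayload instance. The class and method names are illustrative.
final class DateBinaryLengthSketch {

    static int lengthPrefixFor(final Timestamp timestamp) {
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(timestamp.getTime());
        boolean isDateAbsent = 0 == calendar.get(Calendar.YEAR)
                && 0 == calendar.get(Calendar.MONTH) + 1 && 0 == calendar.get(Calendar.DAY_OF_MONTH);
        boolean isTimeAbsent = 0 == calendar.get(Calendar.HOUR_OF_DAY)
                && 0 == calendar.get(Calendar.MINUTE) && 0 == calendar.get(Calendar.SECOND);
        boolean isNanosAbsent = 0 == timestamp.getNanos();
        if (isDateAbsent && isTimeAbsent && isNanosAbsent) {
            return 0;
        }
        if (isTimeAbsent && isNanosAbsent) {
            return 4;
        }
        if (isNanosAbsent) {
            return 7;
        }
        return 11;
    }

    public static void main(final String[] args) {
        // Midnight with no fractional seconds: only the date bytes are written (prefix 4).
        System.out.println(lengthPrefixFor(Timestamp.valueOf("2018-07-16 00:00:00")));
        // Date plus time of day: prefix 7.
        System.out.println(lengthPrefixFor(Timestamp.valueOf("2018-07-16 10:30:00")));
        // Fractional seconds present: the full 11-byte form is used.
        System.out.println(lengthPrefixFor(Timestamp.valueOf("2018-07-16 10:30:00.5")));
    }
}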
|
sharding-proxy/src/main/java/io/shardingsphere/proxy/transport/mysql/packet/command/query/binary/execute/protocol/DateBinaryProtocolValue.java
|
/*
* Copyright 2016-2018 shardingsphere.io.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package io.shardingsphere.proxy.transport.mysql.packet.command.query.binary.execute.protocol;
import io.shardingsphere.proxy.transport.mysql.packet.MySQLPacketPayload;
import java.sql.Timestamp;
import java.util.Calendar;
/**
* Binary protocol value for date.
*
* @author zhangyonglun
* @author zhangliang
*/
public final class DateBinaryProtocolValue implements BinaryProtocolValue {
@Override
public Object read(final MySQLPacketPayload payload) {
int length = payload.readInt1();
switch (length) {
case 0:
return new Timestamp(0L);
case 4:
return getTimestampForDate(payload);
case 7:
return getTimestampForDatetime(payload);
case 11:
Timestamp result = getTimestampForDatetime(payload);
result.setNanos(payload.readInt4());
return result;
default:
throw new IllegalArgumentException(String.format("Wrong length '%d' of MYSQL_TYPE_TIME", length));
}
}
private Timestamp getTimestampForDate(final MySQLPacketPayload payload) {
Calendar result = Calendar.getInstance();
result.set(payload.readInt2(), payload.readInt1() - 1, payload.readInt1());
return new Timestamp(result.getTimeInMillis());
}
private Timestamp getTimestampForDatetime(final MySQLPacketPayload payload) {
Calendar result = Calendar.getInstance();
result.set(payload.readInt2(), payload.readInt1() - 1, payload.readInt1(), payload.readInt1(), payload.readInt1(), payload.readInt1());
return new Timestamp(result.getTimeInMillis());
}
@Override
public void write(final MySQLPacketPayload payload, final Object value) {
Timestamp timestamp = (Timestamp) value;
Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(timestamp.getTime());
int year = calendar.get(Calendar.YEAR);
int month = calendar.get(Calendar.MONTH) + 1;
int dayOfMonth = calendar.get(Calendar.DAY_OF_MONTH);
int hourOfDay = calendar.get(Calendar.HOUR_OF_DAY);
int minutes = calendar.get(Calendar.MINUTE);
int seconds = calendar.get(Calendar.SECOND);
int nanos = timestamp.getNanos();
boolean isDateAbsent = 0 == year && 0 == month && 0 == dayOfMonth;
boolean isTimeAbsent = 0 == hourOfDay && 0 == minutes && 0 == seconds;
boolean isNanosAbsent = 0 == nanos;
if (isDateAbsent && isTimeAbsent && isNanosAbsent) {
payload.writeInt1(0);
return;
}
if (isTimeAbsent && isNanosAbsent) {
payload.writeInt1(4);
writeDate(payload, year, month, dayOfMonth);
return;
}
if (isNanosAbsent) {
payload.writeInt1(7);
writeDate(payload, year, month, dayOfMonth);
writeTime(payload, hourOfDay, minutes, seconds);
return;
}
payload.writeInt1(11);
writeDate(payload, year, month, dayOfMonth);
writeTime(payload, hourOfDay, minutes, seconds);
writeNanos(payload, nanos);
}
private void writeDate(final MySQLPacketPayload payload, final int year, final int month, final int dayOfMonth) {
payload.writeInt2(year);
payload.writeInt1(month);
payload.writeInt1(dayOfMonth);
}
private void writeTime(final MySQLPacketPayload payload, final int hourOfDay, final int minutes, final int seconds) {
payload.writeInt1(hourOfDay);
payload.writeInt1(minutes);
payload.writeInt1(seconds);
}
private void writeNanos(final MySQLPacketPayload payload, final int nanos) {
payload.writeInt4(nanos);
}
}
|
add todo to be confirmed
|
sharding-proxy/src/main/java/io/shardingsphere/proxy/transport/mysql/packet/command/query/binary/execute/protocol/DateBinaryProtocolValue.java
|
add todo to be confirmed
|
|
Java
|
apache-2.0
|
f4ce07c81bfb027d3245533c5dd0e5ef35ae3a7b
| 0
|
Squallium/android-commons-inapp,Squallium/android-commons-inapp
|
/* Copyright (c) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squallium.commons.inapp.google;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Represents a block of information about in-app items.
* An Inventory is returned by such methods as {@link IabHelper#queryInventory}.
*/
public class Inventory {
Map<String,SkuDetails> mSkuMap = new HashMap<String,SkuDetails>();
Map<String,Purchase> mPurchaseMap = new HashMap<String,Purchase>();
Inventory() { }
/** Returns the listing details for an in-app product. */
public SkuDetails getSkuDetails(String sku) {
return mSkuMap.get(sku);
}
/** Returns purchase information for a given product, or null if there is no purchase. */
public Purchase getPurchase(String sku) {
return mPurchaseMap.get(sku);
}
/** Returns whether or not there exists a purchase of the given product. */
public boolean hasPurchase(String sku) {
return mPurchaseMap.containsKey(sku);
}
/** Returns whether or not details about the given product are available. */
public boolean hasDetails(String sku) {
return mSkuMap.containsKey(sku);
}
/**
* Erase a purchase (locally) from the inventory, given its product ID. This just
* modifies the Inventory object locally and has no effect on the server! This is
* useful when you have an existing Inventory object which you know to be up to date,
* and you have just consumed an item successfully, which means that erasing its
* purchase data from the Inventory you already have is quicker than querying for
* a new Inventory.
*/
public void erasePurchase(String sku) {
if (mPurchaseMap.containsKey(sku)) mPurchaseMap.remove(sku);
}
/** Returns a list of all owned product IDs. */
public List<String> getAllOwnedSkus() {
return new ArrayList<String>(mPurchaseMap.keySet());
}
/** Returns a list of all owned product IDs of a given type */
List<String> getAllOwnedSkus(String itemType) {
List<String> result = new ArrayList<String>();
for (Purchase p : mPurchaseMap.values()) {
if (p.getItemType().equals(itemType)) result.add(p.getSku());
}
return result;
}
/** Returns a list of all purchases. */
List<Purchase> getAllPurchases() {
return new ArrayList<Purchase>(mPurchaseMap.values());
}
void addSkuDetails(SkuDetails d) {
mSkuMap.put(d.getSku(), d);
}
void addPurchase(Purchase p) {
mPurchaseMap.put(p.getSku(), p);
}
}
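// Usage sketch (not part of the original file) for the local-erase behaviour documented on
// erasePurchase(): after a consumable purchase is consumed successfully elsewhere, the
// Inventory that was already fetched can be patched locally instead of being re-queried.
// The class and method names below are illustrative, not part of the billing sample's API.
class InventoryEraseSketch {

    // Called once the billing layer reports that the given SKU was consumed.
    void onSkuConsumed(Inventory inventory, String sku) {
        if (inventory.hasPurchase(sku)) {
            // Drop the stale purchase record locally; the rest of the Inventory is still
            // current, so no extra round-trip to the billing service is required.
            inventory.erasePurchase(sku);
        }
    }

    // The owned-SKU list reflects the consumption immediately afterwards.
    List<String> ownedSkusAfterConsume(Inventory inventory) {
        return inventory.getAllOwnedSkus();
    }
}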
|
library/src/com/squallium/commons/inapp/google/Inventory.java
|
/* Copyright (c) 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squallium.commons.inapp.google;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Represents a block of information about in-app items.
* An Inventory is returned by such methods as {@link IabHelper#queryInventory}.
*/
public class Inventory {
Map<String,SkuDetails> mSkuMap = new HashMap<String,SkuDetails>();
Map<String,Purchase> mPurchaseMap = new HashMap<String,Purchase>();
Inventory() { }
/** Returns the listing details for an in-app product. */
public SkuDetails getSkuDetails(String sku) {
return mSkuMap.get(sku);
}
/** Returns purchase information for a given product, or null if there is no purchase. */
public Purchase getPurchase(String sku) {
return mPurchaseMap.get(sku);
}
/** Returns whether or not there exists a purchase of the given product. */
public boolean hasPurchase(String sku) {
return mPurchaseMap.containsKey(sku);
}
/** Returns whether or not details about the given product are available. */
public boolean hasDetails(String sku) {
return mSkuMap.containsKey(sku);
}
/**
* Erase a purchase (locally) from the inventory, given its product ID. This just
* modifies the Inventory object locally and has no effect on the server! This is
* useful when you have an existing Inventory object which you know to be up to date,
* and you have just consumed an item successfully, which means that erasing its
* purchase data from the Inventory you already have is quicker than querying for
* a new Inventory.
*/
public void erasePurchase(String sku) {
if (mPurchaseMap.containsKey(sku)) mPurchaseMap.remove(sku);
}
/** Returns a list of all owned product IDs. */
List<String> getAllOwnedSkus() {
return new ArrayList<String>(mPurchaseMap.keySet());
}
/** Returns a list of all owned product IDs of a given type */
List<String> getAllOwnedSkus(String itemType) {
List<String> result = new ArrayList<String>();
for (Purchase p : mPurchaseMap.values()) {
if (p.getItemType().equals(itemType)) result.add(p.getSku());
}
return result;
}
/** Returns a list of all purchases. */
List<Purchase> getAllPurchases() {
return new ArrayList<Purchase>(mPurchaseMap.values());
}
void addSkuDetails(SkuDetails d) {
mSkuMap.put(d.getSku(), d);
}
void addPurchase(Purchase p) {
mPurchaseMap.put(p.getSku(), p);
}
}
|
Changed visibility of allOwnedSkus
|
library/src/com/squallium/commons/inapp/google/Inventory.java
|
Changed visibility of allOwnedSkus
|
|
Java
|
apache-2.0
|
ab08ff684a215efcce06f4254dad0e3bcaeaed46
| 0
|
dilini-muthumala/siddhi,dilini-muthumala/siddhi,tishan89/siddhi,suhothayan/siddhi,gokul/siddhi,grainier/siddhi,mohanvive/siddhi,gokul/siddhi,grainier/siddhi,wso2/siddhi,mohanvive/siddhi,wso2/siddhi,tishan89/siddhi,suhothayan/siddhi
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.aggregation;
import io.siddhi.core.config.SiddhiQueryContext;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.state.MetaStateEvent;
import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.exception.QueryableRecordTableException;
import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.executor.VariableExpressionExecutor;
import io.siddhi.core.query.input.stream.single.SingleStreamRuntime;
import io.siddhi.core.query.processor.ProcessingMode;
import io.siddhi.core.query.processor.stream.window.QueryableProcessor;
import io.siddhi.core.query.selector.GroupByKeyGenerator;
import io.siddhi.core.table.Table;
import io.siddhi.core.util.collection.operator.CompiledCondition;
import io.siddhi.core.util.collection.operator.CompiledSelection;
import io.siddhi.core.util.collection.operator.IncrementalAggregateCompileCondition;
import io.siddhi.core.util.collection.operator.MatchingMetaInfoHolder;
import io.siddhi.core.util.parser.ExpressionParser;
import io.siddhi.core.util.parser.OperatorParser;
import io.siddhi.core.util.parser.helper.QueryParserHelper;
import io.siddhi.core.util.snapshot.SnapshotService;
import io.siddhi.core.util.statistics.LatencyTracker;
import io.siddhi.core.util.statistics.MemoryCalculable;
import io.siddhi.core.util.statistics.ThroughputTracker;
import io.siddhi.core.util.statistics.metrics.Level;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.aggregation.Within;
import io.siddhi.query.api.definition.AbstractDefinition;
import io.siddhi.query.api.definition.AggregationDefinition;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.definition.StreamDefinition;
import io.siddhi.query.api.exception.SiddhiAppValidationException;
import io.siddhi.query.api.execution.query.selection.OutputAttribute;
import io.siddhi.query.api.execution.query.selection.Selector;
import io.siddhi.query.api.expression.AttributeFunction;
import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.Variable;
import io.siddhi.query.api.expression.condition.Compare;
import io.siddhi.query.api.expression.constant.BoolConstant;
import org.apache.log4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static io.siddhi.core.util.SiddhiConstants.AGG_EXTERNAL_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_LAST_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_SHARD_ID_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_START_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.UNKNOWN_STATE;
import static io.siddhi.query.api.expression.Expression.Time.normalizeDuration;
/**
* Aggregation runtime managing aggregation operations for aggregation definition.
*/
public class AggregationRuntime implements MemoryCalculable {
private static final Logger LOG = Logger.getLogger(AggregationRuntime.class);
private AggregationDefinition aggregationDefinition;
private boolean isProcessingOnExternalTime;
private boolean isDistributed;
private List<TimePeriod.Duration> incrementalDurations;
private Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap;
private Map<TimePeriod.Duration, Table> aggregationTables;
private List<String> tableAttributesNameList;
private MetaStreamEvent aggregateMetaStreamEvent;
private List<ExpressionExecutor> outputExpressionExecutors;
private Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap;
private ExpressionExecutor shouldUpdateTimestamp;
private Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap;
private boolean isOptimisedLookup;
private List<OutputAttribute> defaultSelectorList;
private List<String> groupByVariablesList;
private boolean isLatestEventColAdded;
private int baseAggregatorBeginIndex;
private List<Expression> finalBaseExpressionsList;
private IncrementalDataPurger incrementalDataPurger;
private IncrementalExecutorsInitialiser incrementalExecutorsInitialiser;
private SingleStreamRuntime singleStreamRuntime;
private LatencyTracker latencyTrackerFind;
private ThroughputTracker throughputTrackerFind;
private boolean isFirstEventArrived;
public AggregationRuntime(AggregationDefinition aggregationDefinition, boolean isProcessingOnExternalTime,
boolean isDistributed, List<TimePeriod.Duration> incrementalDurations,
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
Map<TimePeriod.Duration, Table> aggregationTables,
List<ExpressionExecutor> outputExpressionExecutors,
Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap,
ExpressionExecutor shouldUpdateTimestamp,
Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap,
boolean isOptimisedLookup, List<OutputAttribute> defaultSelectorList,
List<String> groupByVariablesList,
boolean isLatestEventColAdded, int baseAggregatorBeginIndex,
List<Expression> finalBaseExpressionList, IncrementalDataPurger incrementalDataPurger,
IncrementalExecutorsInitialiser incrementalExecutorInitialiser,
SingleStreamRuntime singleStreamRuntime, MetaStreamEvent tableMetaStreamEvent,
LatencyTracker latencyTrackerFind, ThroughputTracker throughputTrackerFind) {
this.aggregationDefinition = aggregationDefinition;
this.isProcessingOnExternalTime = isProcessingOnExternalTime;
this.isDistributed = isDistributed;
this.incrementalDurations = incrementalDurations;
this.incrementalExecutorMap = incrementalExecutorMap;
this.aggregationTables = aggregationTables;
this.tableAttributesNameList = tableMetaStreamEvent.getInputDefinitions().get(0).getAttributeList()
.stream().map(Attribute::getName).collect(Collectors.toList());
this.outputExpressionExecutors = outputExpressionExecutors;
this.aggregateProcessingExecutorsMap = aggregateProcessingExecutorsMap;
this.shouldUpdateTimestamp = shouldUpdateTimestamp;
this.groupByKeyGeneratorMap = groupByKeyGeneratorMap;
this.isOptimisedLookup = isOptimisedLookup;
this.defaultSelectorList = defaultSelectorList;
this.groupByVariablesList = groupByVariablesList;
this.isLatestEventColAdded = isLatestEventColAdded;
this.baseAggregatorBeginIndex = baseAggregatorBeginIndex;
this.finalBaseExpressionsList = finalBaseExpressionList;
this.incrementalDataPurger = incrementalDataPurger;
this.incrementalExecutorsInitialiser = incrementalExecutorInitialiser;
this.singleStreamRuntime = singleStreamRuntime;
this.aggregateMetaStreamEvent = new MetaStreamEvent();
aggregationDefinition.getAttributeList().forEach(this.aggregateMetaStreamEvent::addOutputData);
this.latencyTrackerFind = latencyTrackerFind;
this.throughputTrackerFind = throughputTrackerFind;
}
private static void initMetaStreamEvent(MetaStreamEvent metaStreamEvent, AbstractDefinition inputDefinition,
String inputReferenceId) {
metaStreamEvent.addInputDefinition(inputDefinition);
metaStreamEvent.setInputReferenceId(inputReferenceId);
metaStreamEvent.initializeAfterWindowData();
inputDefinition.getAttributeList().forEach(metaStreamEvent::addData);
}
private static MetaStreamEvent alterMetaStreamEvent(boolean isStoreQuery, MetaStreamEvent originalMetaStreamEvent,
List<Attribute> additionalAttributes) {
StreamDefinition alteredStreamDef = new StreamDefinition();
if (!isStoreQuery) {
for (Attribute attribute : originalMetaStreamEvent.getLastInputDefinition().getAttributeList()) {
alteredStreamDef.attribute(attribute.getName(), attribute.getType());
}
}
additionalAttributes.forEach(attribute -> alteredStreamDef.attribute(attribute.getName(), attribute.getType()));
initMetaStreamEvent(originalMetaStreamEvent, alteredStreamDef, originalMetaStreamEvent.getInputReferenceId());
return originalMetaStreamEvent;
}
private static MetaStreamEvent createMetaStoreEvent(AbstractDefinition tableDefinition, String referenceId) {
MetaStreamEvent metaStreamEventForTable = new MetaStreamEvent();
metaStreamEventForTable.setEventType(MetaStreamEvent.EventType.TABLE);
initMetaStreamEvent(metaStreamEventForTable, tableDefinition, referenceId);
return metaStreamEventForTable;
}
private static MatchingMetaInfoHolder alterMetaInfoHolderForStoreQuery(
MetaStreamEvent newMetaStreamEventWithStartEnd, MatchingMetaInfoHolder matchingMetaInfoHolder) {
MetaStateEvent metaStateEvent = new MetaStateEvent(2);
MetaStreamEvent incomingMetaStreamEvent = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(0);
metaStateEvent.addEvent(newMetaStreamEventWithStartEnd);
metaStateEvent.addEvent(incomingMetaStreamEvent);
return new MatchingMetaInfoHolder(metaStateEvent, 0, 1,
newMetaStreamEventWithStartEnd.getLastInputDefinition(),
incomingMetaStreamEvent.getLastInputDefinition(), UNKNOWN_STATE);
}
private static MatchingMetaInfoHolder createNewStreamTableMetaInfoHolder(MetaStreamEvent metaStreamEvent,
MetaStreamEvent metaStoreEvent) {
MetaStateEvent metaStateEvent = new MetaStateEvent(2);
metaStateEvent.addEvent(metaStreamEvent);
metaStateEvent.addEvent(metaStoreEvent);
return new MatchingMetaInfoHolder(metaStateEvent, 0, 1,
metaStreamEvent.getLastInputDefinition(), metaStoreEvent.getLastInputDefinition(), UNKNOWN_STATE);
}
public AggregationDefinition getAggregationDefinition() {
return aggregationDefinition;
}
public SingleStreamRuntime getSingleStreamRuntime() {
return singleStreamRuntime;
}
public StreamEvent find(StateEvent matchingEvent, CompiledCondition compiledCondition,
SiddhiQueryContext siddhiQueryContext) {
try {
SnapshotService.getSkipStateStorageThreadLocal().set(true);
if (latencyTrackerFind != null &&
Level.BASIC.compareTo(siddhiQueryContext.getSiddhiAppContext().getRootMetricsLevel()) <= 0) {
latencyTrackerFind.markIn();
throughputTrackerFind.eventIn();
}
if (!isDistributed && !isFirstEventArrived) {
// No need to initialise executors if it is distributed
initialiseExecutors(false);
}
return ((IncrementalAggregateCompileCondition) compiledCondition).find(matchingEvent,
incrementalExecutorMap, aggregateProcessingExecutorsMap, groupByKeyGeneratorMap,
shouldUpdateTimestamp);
} finally {
SnapshotService.getSkipStateStorageThreadLocal().set(null);
if (latencyTrackerFind != null &&
Level.BASIC.compareTo(siddhiQueryContext.getSiddhiAppContext().getRootMetricsLevel()) <= 0) {
latencyTrackerFind.markOut();
}
}
}
public CompiledCondition compileExpression(Expression expression, Within within, Expression per,
List<Variable> queryGroupByList,
MatchingMetaInfoHolder matchingMetaInfoHolder,
List<VariableExpressionExecutor> variableExpressionExecutors,
Map<String, Table> tableMap, SiddhiQueryContext siddhiQueryContext) {
String aggregationName = aggregationDefinition.getId();
boolean isOptimisedTableLookup = isOptimisedLookup;
Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
CompiledCondition withinInMemoryCompileCondition;
CompiledCondition onCompiledCondition;
List<Attribute> additionalAttributes = new ArrayList<>();
// Define additional attribute list
additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
int lowerGranularitySize = this.incrementalDurations.size() - 1;
List<String> lowerGranularityAttributes = new ArrayList<>();
if (isDistributed) {
//Add additional attributes to get base aggregation timestamps based on current timestamps
// for values calculated in in-memory in the shards
for (int i = 0; i < lowerGranularitySize; i++) {
String attributeName = "_AGG_TIMESTAMP_FILTER_" + i;
additionalAttributes.add(new Attribute(attributeName, Attribute.Type.LONG));
lowerGranularityAttributes.add(attributeName);
}
}
// Get table definition. Table definitions for all the tables used to persist aggregates are similar.
// Therefore it's enough to get the definition from one table.
AbstractDefinition tableDefinition = aggregationTables.get(incrementalDurations.get(0)).getTableDefinition();
boolean isStoreQuery = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1;
// Alter existing meta stream event or create new one if a meta stream doesn't exist
// After calling this method the original MatchingMetaInfoHolder's meta stream event would be altered
// Alter meta info holder to contain stream event and aggregate both when it's a store query
MetaStreamEvent metaStreamEventForTableLookups;
if (isStoreQuery) {
metaStreamEventForTableLookups = alterMetaStreamEvent(true, new MetaStreamEvent(), additionalAttributes);
matchingMetaInfoHolder = alterMetaInfoHolderForStoreQuery(metaStreamEventForTableLookups,
matchingMetaInfoHolder);
} else {
metaStreamEventForTableLookups = alterMetaStreamEvent(false,
matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(0), additionalAttributes);
}
// Create new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and table meta event
String aggReferenceId = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(1).getInputReferenceId();
MetaStreamEvent metaStoreEventForTableLookups = createMetaStoreEvent(tableDefinition,
aggReferenceId);
// Create new MatchingMetaInfoHolder containing metaStreamEventForTableLookups and table meta event
MatchingMetaInfoHolder metaInfoHolderForTableLookups = createNewStreamTableMetaInfoHolder(
metaStreamEventForTableLookups, metaStoreEventForTableLookups);
// Create per expression executor
ExpressionExecutor perExpressionExecutor;
if (per != null) {
perExpressionExecutor = ExpressionParser.parseExpression(per, matchingMetaInfoHolder.getMetaStateEvent(),
matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors,
false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
throw new SiddhiAppCreationException(
"Query " + siddhiQueryContext.getName() + "'s per value expected a string but found "
+ perExpressionExecutor.getReturnType(),
per.getQueryContextStartIndex(), per.getQueryContextEndIndex());
}
// Additional Per time function verification at compile time if it is a constant
if (perExpressionExecutor instanceof ConstantExpressionExecutor) {
String perValue = ((ConstantExpressionExecutor) perExpressionExecutor).getValue().toString();
try {
normalizeDuration(perValue);
} catch (SiddhiAppValidationException e) {
throw new SiddhiAppValidationException(
"Aggregation Query's per value is expected to be of a valid time function of the " +
"following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES
+ ", " + TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", "
+ TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
}
}
} else {
throw new SiddhiAppCreationException("Syntax Error: Aggregation join query must contain a `per` " +
"definition for granularity");
}
// Create start and end time expression
Expression startEndTimeExpression;
ExpressionExecutor startTimeEndTimeExpressionExecutor;
if (within != null) {
if (within.getTimeRange().size() == 1) {
startEndTimeExpression = new AttributeFunction("incrementalAggregator",
"startTimeEndTime", within.getTimeRange().get(0));
} else { // within.getTimeRange().size() == 2
startEndTimeExpression = new AttributeFunction("incrementalAggregator",
"startTimeEndTime", within.getTimeRange().get(0), within.getTimeRange().get(1));
}
startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression,
matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
variableExpressionExecutors, false, 0,
ProcessingMode.BATCH, false, siddhiQueryContext);
} else {
throw new SiddhiAppCreationException("Syntax Error : Aggregation read query must contain a `within` " +
"definition for filtering of aggregation data.");
}
// Create within expression
Expression timeFilterExpression;
if (isProcessingOnExternalTime) {
timeFilterExpression = Expression.variable(AGG_EXTERNAL_TIMESTAMP_COL);
} else {
timeFilterExpression = Expression.variable(AGG_START_TIMESTAMP_COL);
}
Expression withinExpression;
Expression start = Expression.variable(additionalAttributes.get(0).getName());
Expression end = Expression.variable(additionalAttributes.get(1).getName());
Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL,
timeFilterExpression);
Expression compareWithEndTime = Compare.compare(timeFilterExpression, Compare.Operator.LESS_THAN, end);
withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
List<ExpressionExecutor> timestampFilterExecutors = new ArrayList<>();
if (isDistributed) {
for (int i = 0; i < lowerGranularitySize; i++) {
Expression[] expressionArray = new Expression[]{
new AttributeFunction("", "currentTimeMillis", null),
Expression.value(this.incrementalDurations.get(i + 1).toString())};
Expression filterExpression = new AttributeFunction("incrementalAggregator",
"getAggregationStartTime", expressionArray);
timestampFilterExecutors.add(ExpressionParser.parseExpression(filterExpression,
matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
variableExpressionExecutors, false, 0,
ProcessingMode.BATCH, false, siddhiQueryContext));
}
}
// Create compile condition per each table used to persist aggregates.
// These compile conditions are used to check whether the aggregates in tables are within the given duration.
// Combine with and on condition for table query
boolean shouldApplyReducedCondition = false;
Expression reducedExpression = null;
//Check if there is no on conditions
if (!(expression instanceof BoolConstant)) {
AggregationExpressionBuilder aggregationExpressionBuilder = new AggregationExpressionBuilder(expression);
AggregationExpressionVisitor expressionVisitor = new AggregationExpressionVisitor(
metaStreamEventForTableLookups.getInputReferenceId(),
metaStreamEventForTableLookups.getLastInputDefinition().getAttributeList(),
this.tableAttributesNameList
);
aggregationExpressionBuilder.build(expressionVisitor);
shouldApplyReducedCondition = expressionVisitor.applyReducedExpression();
reducedExpression = expressionVisitor.getReducedExpression();
}
Expression withinExpressionTable;
if (shouldApplyReducedCondition) {
withinExpressionTable = Expression.and(withinExpression, reducedExpression);
} else {
withinExpressionTable = withinExpression;
}
Variable timestampVariable = new Variable(AGG_START_TIMESTAMP_COL);
List<String> queryGroupByNamesList = queryGroupByList.stream()
.map(Variable::getAttributeName)
.collect(Collectors.toList());
boolean queryGroupByContainsTimestamp = queryGroupByNamesList.remove(AGG_START_TIMESTAMP_COL);
boolean isQueryGroupBySame = queryGroupByList.isEmpty() ||
(queryGroupByList.contains(timestampVariable) && queryGroupByNamesList.equals(groupByVariablesList));
List<VariableExpressionExecutor> variableExpExecutorsForTableLookups = new ArrayList<>();
Map<TimePeriod.Duration, CompiledSelection> withinTableCompiledSelection = new HashMap<>();
if (isOptimisedTableLookup) {
Selector selector = Selector.selector();
List<Variable> groupByList = new ArrayList<>();
if (!isQueryGroupBySame) {
if (queryGroupByContainsTimestamp) {
if (isProcessingOnExternalTime) {
groupByList.add(new Variable(AGG_EXTERNAL_TIMESTAMP_COL));
} else {
groupByList.add(new Variable(AGG_START_TIMESTAMP_COL));
}
//Remove timestamp to process the rest
queryGroupByList.remove(timestampVariable);
}
for (Variable queryGroupBy : queryGroupByList) {
if (groupByVariablesList.contains(queryGroupBy.getAttributeName())) {
String referenceId = queryGroupBy.getStreamId();
if (aggReferenceId == null || aggReferenceId.equalsIgnoreCase(referenceId)) {
groupByList.add(queryGroupBy);
}
}
}
// If query group bys are based on joining stream
if (groupByList.isEmpty()) {
isQueryGroupBySame = true;
}
}
if (aggReferenceId != null) {
groupByList.forEach((groupBy) -> groupBy.setStreamId(aggReferenceId));
}
selector.addGroupByList(groupByList);
List<OutputAttribute> selectorList;
if (!isQueryGroupBySame) {
selectorList = constructSelectorList(isProcessingOnExternalTime, isDistributed, isLatestEventColAdded,
baseAggregatorBeginIndex, groupByVariablesList.size(), finalBaseExpressionsList,
tableDefinition, groupByList);
} else {
selectorList = defaultSelectorList;
}
if (aggReferenceId != null) {
for (OutputAttribute outputAttribute : selectorList) {
if (outputAttribute.getExpression() instanceof Variable) {
((Variable) outputAttribute.getExpression()).setStreamId(aggReferenceId);
} else {
for (Expression parameter :
((AttributeFunction) outputAttribute.getExpression()).getParameters()) {
((Variable) parameter).setStreamId(aggReferenceId);
}
}
}
}
selector.addSelectionList(selectorList);
try {
aggregationTables.entrySet().forEach(
(durationTableEntry -> {
CompiledSelection compiledSelection = ((QueryableProcessor) durationTableEntry.getValue())
.compileSelection(
selector, tableDefinition.getAttributeList(), metaInfoHolderForTableLookups,
variableExpExecutorsForTableLookups, tableMap, siddhiQueryContext
);
withinTableCompiledSelection.put(durationTableEntry.getKey(), compiledSelection);
})
);
} catch (SiddhiAppCreationException | QueryableRecordTableException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Aggregation Query optimization failed for aggregation: '" + aggregationName + "'. " +
"Creating table lookup query in normal mode. Reason for failure: " + e.getMessage(), e);
}
isOptimisedTableLookup = false;
}
}
for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpressionTable,
metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap,
siddhiQueryContext);
withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
}
// Create compile condition for in-memory data.
// This compile condition is used to check whether the running aggregates (in-memory data)
// are within given duration
withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true),
withinExpression, metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap,
siddhiQueryContext);
// Create compile condition for in-memory data, in case of distributed
// Look at the lower level granularities
Map<TimePeriod.Duration, CompiledCondition> withinTableLowerGranularityCompileCondition = new HashMap<>();
Expression lowerGranularity;
if (isDistributed) {
for (int i = 0; i < lowerGranularitySize; i++) {
if (isProcessingOnExternalTime) {
lowerGranularity = Expression.and(
Expression.compare(
Expression.variable("AGG_TIMESTAMP"),
Compare.Operator.GREATER_THAN_EQUAL,
Expression.variable(lowerGranularityAttributes.get(i))),
withinExpressionTable
);
} else {
if (shouldApplyReducedCondition) {
lowerGranularity = Expression.and(
Expression.compare(
Expression.variable("AGG_TIMESTAMP"),
Compare.Operator.GREATER_THAN_EQUAL,
Expression.variable(lowerGranularityAttributes.get(i))),
reducedExpression
);
} else {
lowerGranularity =
Expression.compare(
Expression.variable("AGG_TIMESTAMP"),
Compare.Operator.GREATER_THAN_EQUAL,
Expression.variable(lowerGranularityAttributes.get(i)));
}
}
TimePeriod.Duration duration = this.incrementalDurations.get(i);
String tableName = aggregationName + "_" + duration.toString();
CompiledCondition compiledCondition = tableMap.get(tableName).compileCondition(lowerGranularity,
metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap,
siddhiQueryContext);
withinTableLowerGranularityCompileCondition.put(duration, compiledCondition);
}
}
QueryParserHelper.reduceMetaComplexEvent(metaInfoHolderForTableLookups.getMetaStateEvent());
// On compile condition.
// After finding all the aggregates belonging to within duration, the final on condition (given as
// "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
// This condition is used for that purpose.
onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression,
matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiQueryContext);
return new IncrementalAggregateCompileCondition(aggregationName, isProcessingOnExternalTime, isDistributed,
incrementalDurations, aggregationTables, outputExpressionExecutors,
isOptimisedTableLookup, withinTableCompiledSelection, withinTableCompiledConditions,
withinInMemoryCompileCondition, withinTableLowerGranularityCompileCondition, onCompiledCondition,
additionalAttributes, perExpressionExecutor, startTimeEndTimeExpressionExecutor,
timestampFilterExecutors, aggregateMetaStreamEvent, matchingMetaInfoHolder,
metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups);
}
private static List<OutputAttribute> constructSelectorList(boolean isProcessingOnExternalTime,
boolean isDistributed,
boolean isLatestEventColAdded,
int baseAggregatorBeginIndex,
int numGroupByVariables,
List<Expression> finalBaseExpressions,
AbstractDefinition incomingOutputStreamDefinition,
List<Variable> newGroupByList) {
List<OutputAttribute> selectorList = new ArrayList<>();
List<Attribute> attributeList = incomingOutputStreamDefinition.getAttributeList();
List<String> queryGroupByNames = newGroupByList.stream()
.map(Variable::getAttributeName).collect(Collectors.toList());
Variable maxVariable;
if (!isProcessingOnExternalTime) {
maxVariable = new Variable(AGG_START_TIMESTAMP_COL);
} else if (isLatestEventColAdded) {
maxVariable = new Variable(AGG_LAST_TIMESTAMP_COL);
} else {
maxVariable = new Variable(AGG_EXTERNAL_TIMESTAMP_COL);
}
int i = 0;
//Add timestamp selector
OutputAttribute timestampAttribute;
if (!isProcessingOnExternalTime && queryGroupByNames.contains(AGG_START_TIMESTAMP_COL)) {
timestampAttribute = new OutputAttribute(new Variable(AGG_START_TIMESTAMP_COL));
} else {
timestampAttribute = new OutputAttribute(attributeList.get(i).getName(),
Expression.function("max", new Variable(AGG_START_TIMESTAMP_COL)));
}
selectorList.add(timestampAttribute);
i++;
if (isDistributed) {
selectorList.add(new OutputAttribute(AGG_SHARD_ID_COL, Expression.function("max",
new Variable(AGG_SHARD_ID_COL))));
i++;
}
if (isProcessingOnExternalTime) {
OutputAttribute externalTimestampAttribute;
if (queryGroupByNames.contains(AGG_START_TIMESTAMP_COL)) {
externalTimestampAttribute = new OutputAttribute(new Variable(AGG_EXTERNAL_TIMESTAMP_COL));
} else {
externalTimestampAttribute = new OutputAttribute(attributeList.get(i).getName(),
Expression.function("max", new Variable(AGG_EXTERNAL_TIMESTAMP_COL)));
}
selectorList.add(externalTimestampAttribute);
i++;
}
for (int j = 0; j < numGroupByVariables; j++) {
OutputAttribute groupByAttribute;
Variable variable = new Variable(attributeList.get(i).getName());
if (queryGroupByNames.contains(variable.getAttributeName())) {
groupByAttribute = new OutputAttribute(variable);
} else {
groupByAttribute = new OutputAttribute(variable.getAttributeName(),
Expression.function("max", new Variable(variable.getAttributeName())));
}
selectorList.add(groupByAttribute);
i++;
}
if (isLatestEventColAdded) {
baseAggregatorBeginIndex = baseAggregatorBeginIndex - 1;
}
for (; i < baseAggregatorBeginIndex; i++) {
OutputAttribute outputAttribute = new OutputAttribute(attributeList.get(i).getName(),
Expression.function("incrementalAggregator", "last",
new Variable(attributeList.get(i).getName()), maxVariable));
selectorList.add(outputAttribute);
}
if (isLatestEventColAdded) {
OutputAttribute lastTimestampAttribute = new OutputAttribute(AGG_LAST_TIMESTAMP_COL,
Expression.function("max", new Variable(AGG_LAST_TIMESTAMP_COL)));
selectorList.add(lastTimestampAttribute);
i++;
}
for (Expression finalBaseExpression : finalBaseExpressions) {
OutputAttribute outputAttribute = new OutputAttribute(attributeList.get(i).getName(), finalBaseExpression);
selectorList.add(outputAttribute);
i++;
}
return selectorList;
}
public void startPurging() {
incrementalDataPurger.executeIncrementalDataPurging();
}
public void initialiseExecutors(boolean isFirstEventArrived) {
// State only updated when first event arrives to IncrementalAggregationProcessor
if (isFirstEventArrived) {
this.isFirstEventArrived = true;
for (Map.Entry<TimePeriod.Duration, IncrementalExecutor> durationIncrementalExecutorEntry :
this.incrementalExecutorMap.entrySet()) {
durationIncrementalExecutorEntry.getValue().setProcessingExecutor(true);
}
}
this.incrementalExecutorsInitialiser.initialiseExecutors();
}
public void processEvents(ComplexEventChunk<StreamEvent> streamEventComplexEventChunk) {
incrementalExecutorMap.get(incrementalDurations.get(0)).execute(streamEventComplexEventChunk);
}
}
|
modules/siddhi-core/src/main/java/io/siddhi/core/aggregation/AggregationRuntime.java
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.aggregation;
import io.siddhi.core.config.SiddhiQueryContext;
import io.siddhi.core.event.ComplexEventChunk;
import io.siddhi.core.event.state.MetaStateEvent;
import io.siddhi.core.event.state.StateEvent;
import io.siddhi.core.event.stream.MetaStreamEvent;
import io.siddhi.core.event.stream.StreamEvent;
import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.executor.ConstantExpressionExecutor;
import io.siddhi.core.executor.ExpressionExecutor;
import io.siddhi.core.executor.VariableExpressionExecutor;
import io.siddhi.core.query.input.stream.single.SingleStreamRuntime;
import io.siddhi.core.query.processor.ProcessingMode;
import io.siddhi.core.query.processor.stream.window.QueryableProcessor;
import io.siddhi.core.query.selector.GroupByKeyGenerator;
import io.siddhi.core.table.Table;
import io.siddhi.core.util.collection.operator.CompiledCondition;
import io.siddhi.core.util.collection.operator.CompiledSelection;
import io.siddhi.core.util.collection.operator.IncrementalAggregateCompileCondition;
import io.siddhi.core.util.collection.operator.MatchingMetaInfoHolder;
import io.siddhi.core.util.parser.ExpressionParser;
import io.siddhi.core.util.parser.OperatorParser;
import io.siddhi.core.util.parser.helper.QueryParserHelper;
import io.siddhi.core.util.snapshot.SnapshotService;
import io.siddhi.core.util.statistics.LatencyTracker;
import io.siddhi.core.util.statistics.MemoryCalculable;
import io.siddhi.core.util.statistics.ThroughputTracker;
import io.siddhi.core.util.statistics.metrics.Level;
import io.siddhi.query.api.aggregation.TimePeriod;
import io.siddhi.query.api.aggregation.Within;
import io.siddhi.query.api.definition.AbstractDefinition;
import io.siddhi.query.api.definition.AggregationDefinition;
import io.siddhi.query.api.definition.Attribute;
import io.siddhi.query.api.definition.StreamDefinition;
import io.siddhi.query.api.exception.SiddhiAppValidationException;
import io.siddhi.query.api.execution.query.selection.OutputAttribute;
import io.siddhi.query.api.execution.query.selection.Selector;
import io.siddhi.query.api.expression.AttributeFunction;
import io.siddhi.query.api.expression.Expression;
import io.siddhi.query.api.expression.Variable;
import io.siddhi.query.api.expression.condition.Compare;
import io.siddhi.query.api.expression.constant.BoolConstant;
import org.apache.log4j.Logger;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static io.siddhi.core.util.SiddhiConstants.AGG_EXTERNAL_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_LAST_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_SHARD_ID_COL;
import static io.siddhi.core.util.SiddhiConstants.AGG_START_TIMESTAMP_COL;
import static io.siddhi.core.util.SiddhiConstants.UNKNOWN_STATE;
import static io.siddhi.query.api.expression.Expression.Time.normalizeDuration;
/**
* Aggregation runtime managing aggregation operations for aggregation definition.
*/
public class AggregationRuntime implements MemoryCalculable {
private static final Logger LOG = Logger.getLogger(AggregationRuntime.class);
private AggregationDefinition aggregationDefinition;
private boolean isProcessingOnExternalTime;
private boolean isDistributed;
private List<TimePeriod.Duration> incrementalDurations;
private Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap;
private Map<TimePeriod.Duration, Table> aggregationTables;
private List<String> tableAttributesNameList;
private MetaStreamEvent aggregateMetaSteamEvent;
private List<ExpressionExecutor> outputExpressionExecutors;
private Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap;
private ExpressionExecutor shouldUpdateTimestamp;
private Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap;
private boolean isOptimisedLookup;
private List<OutputAttribute> defaultSelectorList;
private List<String> groupByVariablesList;
private boolean isLatestEventColAdded;
private int baseAggregatorBeginIndex;
private List<Expression> finalBaseExpressionsList;
private IncrementalDataPurger incrementalDataPurger;
private IncrementalExecutorsInitialiser incrementalExecutorsInitialiser;
private SingleStreamRuntime singleStreamRuntime;
private LatencyTracker latencyTrackerFind;
private ThroughputTracker throughputTrackerFind;
private boolean isFirstEventArrived;
public AggregationRuntime(AggregationDefinition aggregationDefinition, boolean isProcessingOnExternalTime,
boolean isDistributed, List<TimePeriod.Duration> incrementalDurations,
Map<TimePeriod.Duration, IncrementalExecutor> incrementalExecutorMap,
Map<TimePeriod.Duration, Table> aggregationTables,
List<ExpressionExecutor> outputExpressionExecutors,
Map<TimePeriod.Duration, List<ExpressionExecutor>> aggregateProcessingExecutorsMap,
ExpressionExecutor shouldUpdateTimestamp,
Map<TimePeriod.Duration, GroupByKeyGenerator> groupByKeyGeneratorMap,
boolean isOptimisedLookup, List<OutputAttribute> defaultSelectorList,
List<String> groupByVariablesList,
boolean isLatestEventColAdded, int baseAggregatorBeginIndex,
List<Expression> finalBaseExpressionList, IncrementalDataPurger incrementalDataPurger,
IncrementalExecutorsInitialiser incrementalExecutorInitialiser,
SingleStreamRuntime singleStreamRuntime, MetaStreamEvent tableMetaStreamEvent,
LatencyTracker latencyTrackerFind, ThroughputTracker throughputTrackerFind) {
this.aggregationDefinition = aggregationDefinition;
this.isProcessingOnExternalTime = isProcessingOnExternalTime;
this.isDistributed = isDistributed;
this.incrementalDurations = incrementalDurations;
this.incrementalExecutorMap = incrementalExecutorMap;
this.aggregationTables = aggregationTables;
this.tableAttributesNameList = tableMetaStreamEvent.getInputDefinitions().get(0).getAttributeList()
.stream().map(Attribute::getName).collect(Collectors.toList());
this.outputExpressionExecutors = outputExpressionExecutors;
this.aggregateProcessingExecutorsMap = aggregateProcessingExecutorsMap;
this.shouldUpdateTimestamp = shouldUpdateTimestamp;
this.groupByKeyGeneratorMap = groupByKeyGeneratorMap;
this.isOptimisedLookup = isOptimisedLookup;
this.defaultSelectorList = defaultSelectorList;
this.groupByVariablesList = groupByVariablesList;
this.isLatestEventColAdded = isLatestEventColAdded;
this.baseAggregatorBeginIndex = baseAggregatorBeginIndex;
this.finalBaseExpressionsList = finalBaseExpressionList;
this.incrementalDataPurger = incrementalDataPurger;
this.incrementalExecutorsInitialiser = incrementalExecutorInitialiser;
this.singleStreamRuntime = singleStreamRuntime;
this.aggregateMetaSteamEvent = new MetaStreamEvent();
aggregationDefinition.getAttributeList().forEach(this.aggregateMetaSteamEvent::addOutputData);
this.latencyTrackerFind = latencyTrackerFind;
this.throughputTrackerFind = throughputTrackerFind;
}
private static void initMetaStreamEvent(MetaStreamEvent metaStreamEvent, AbstractDefinition inputDefinition,
String inputReferenceId) {
metaStreamEvent.addInputDefinition(inputDefinition);
metaStreamEvent.setInputReferenceId(inputReferenceId);
metaStreamEvent.initializeAfterWindowData();
inputDefinition.getAttributeList().forEach(metaStreamEvent::addData);
}
private static MetaStreamEvent alterMetaStreamEvent(boolean isStoreQuery, MetaStreamEvent originalMetaStreamEvent,
List<Attribute> additionalAttributes) {
StreamDefinition alteredStreamDef = new StreamDefinition();
if (!isStoreQuery) {
for (Attribute attribute : originalMetaStreamEvent.getLastInputDefinition().getAttributeList()) {
alteredStreamDef.attribute(attribute.getName(), attribute.getType());
}
}
additionalAttributes.forEach(attribute -> alteredStreamDef.attribute(attribute.getName(), attribute.getType()));
initMetaStreamEvent(originalMetaStreamEvent, alteredStreamDef, originalMetaStreamEvent.getInputReferenceId());
return originalMetaStreamEvent;
}
private static MetaStreamEvent createMetaStoreEvent(AbstractDefinition tableDefinition, String referenceId) {
MetaStreamEvent metaStreamEventForTable = new MetaStreamEvent();
metaStreamEventForTable.setEventType(MetaStreamEvent.EventType.TABLE);
initMetaStreamEvent(metaStreamEventForTable, tableDefinition, referenceId);
return metaStreamEventForTable;
}
private static MatchingMetaInfoHolder alterMetaInfoHolderForStoreQuery(
MetaStreamEvent newMetaStreamEventWithStartEnd, MatchingMetaInfoHolder matchingMetaInfoHolder) {
MetaStateEvent metaStateEvent = new MetaStateEvent(2);
MetaStreamEvent incomingMetaStreamEvent = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(0);
metaStateEvent.addEvent(newMetaStreamEventWithStartEnd);
metaStateEvent.addEvent(incomingMetaStreamEvent);
return new MatchingMetaInfoHolder(metaStateEvent, 0, 1,
newMetaStreamEventWithStartEnd.getLastInputDefinition(),
incomingMetaStreamEvent.getLastInputDefinition(), UNKNOWN_STATE);
}
private static MatchingMetaInfoHolder createNewStreamTableMetaInfoHolder(MetaStreamEvent metaStreamEvent,
MetaStreamEvent metaStoreEvent) {
MetaStateEvent metaStateEvent = new MetaStateEvent(2);
metaStateEvent.addEvent(metaStreamEvent);
metaStateEvent.addEvent(metaStoreEvent);
return new MatchingMetaInfoHolder(metaStateEvent, 0, 1,
metaStreamEvent.getLastInputDefinition(), metaStoreEvent.getLastInputDefinition(), UNKNOWN_STATE);
}
public AggregationDefinition getAggregationDefinition() {
return aggregationDefinition;
}
public SingleStreamRuntime getSingleStreamRuntime() {
return singleStreamRuntime;
}
public StreamEvent find(StateEvent matchingEvent, CompiledCondition compiledCondition,
SiddhiQueryContext siddhiQueryContext) {
try {
SnapshotService.getSkipStateStorageThreadLocal().set(true);
if (latencyTrackerFind != null &&
Level.BASIC.compareTo(siddhiQueryContext.getSiddhiAppContext().getRootMetricsLevel()) <= 0) {
latencyTrackerFind.markIn();
throughputTrackerFind.eventIn();
}
if (!isDistributed && !isFirstEventArrived) {
// No need to initialise executors if it is distributed
initialiseExecutors(false);
}
return ((IncrementalAggregateCompileCondition) compiledCondition).find(matchingEvent,
incrementalExecutorMap, aggregateProcessingExecutorsMap, groupByKeyGeneratorMap,
shouldUpdateTimestamp);
} finally {
SnapshotService.getSkipStateStorageThreadLocal().set(null);
if (latencyTrackerFind != null &&
Level.BASIC.compareTo(siddhiQueryContext.getSiddhiAppContext().getRootMetricsLevel()) <= 0) {
latencyTrackerFind.markOut();
}
}
}
public CompiledCondition compileExpression(Expression expression, Within within, Expression per,
List<Variable> queryGroupByList,
MatchingMetaInfoHolder matchingMetaInfoHolder,
List<VariableExpressionExecutor> variableExpressionExecutors,
Map<String, Table> tableMap, SiddhiQueryContext siddhiQueryContext) {
String aggregationName = aggregationDefinition.getId();
boolean isOptimisedTableLookup = isOptimisedLookup;
Map<TimePeriod.Duration, CompiledCondition> withinTableCompiledConditions = new HashMap<>();
CompiledCondition withinInMemoryCompileCondition;
CompiledCondition onCompiledCondition;
List<Attribute> additionalAttributes = new ArrayList<>();
// Define additional attribute list
additionalAttributes.add(new Attribute("_START", Attribute.Type.LONG));
additionalAttributes.add(new Attribute("_END", Attribute.Type.LONG));
int lowerGranularitySize = this.incrementalDurations.size() - 1;
List<String> lowerGranularityAttributes = new ArrayList<>();
if (isDistributed) {
//Add additional attributes to get base aggregation timestamps based on current timestamps
// for values calculated in in-memory in the shards
for (int i = 0; i < lowerGranularitySize; i++) {
String attributeName = "_AGG_TIMESTAMP_FILTER_" + i;
additionalAttributes.add(new Attribute(attributeName, Attribute.Type.LONG));
lowerGranularityAttributes.add(attributeName);
}
}
// Get table definition. Table definitions for all the tables used to persist aggregates are similar.
// Therefore it's enough to get the definition from one table.
AbstractDefinition tableDefinition = aggregationTables.get(incrementalDurations.get(0)).getTableDefinition();
boolean isStoreQuery = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvents().length == 1;
// Alter existing meta stream event or create new one if a meta stream doesn't exist
// After calling this method the original MatchingMetaInfoHolder's meta stream event would be altered
// Alter meta info holder to contain stream event and aggregate both when it's a store query
MetaStreamEvent metaStreamEventForTableLookups;
if (isStoreQuery) {
metaStreamEventForTableLookups = alterMetaStreamEvent(true, new MetaStreamEvent(), additionalAttributes);
matchingMetaInfoHolder = alterMetaInfoHolderForStoreQuery(metaStreamEventForTableLookups,
matchingMetaInfoHolder);
} else {
metaStreamEventForTableLookups = alterMetaStreamEvent(false,
matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(0), additionalAttributes);
}
// Create new MatchingMetaInfoHolder containing newMetaStreamEventWithStartEnd and table meta event
String aggReferenceId = matchingMetaInfoHolder.getMetaStateEvent().getMetaStreamEvent(1).getInputReferenceId();
MetaStreamEvent metaStoreEventForTableLookups = createMetaStoreEvent(tableDefinition,
aggReferenceId);
// Create new MatchingMetaInfoHolder containing metaStreamEventForTableLookups and table meta event
MatchingMetaInfoHolder metaInfoHolderForTableLookups = createNewStreamTableMetaInfoHolder(
metaStreamEventForTableLookups, metaStoreEventForTableLookups);
// Create per expression executor
ExpressionExecutor perExpressionExecutor;
if (per != null) {
perExpressionExecutor = ExpressionParser.parseExpression(per, matchingMetaInfoHolder.getMetaStateEvent(),
matchingMetaInfoHolder.getCurrentState(), tableMap, variableExpressionExecutors,
false, 0, ProcessingMode.BATCH, false, siddhiQueryContext);
if (perExpressionExecutor.getReturnType() != Attribute.Type.STRING) {
throw new SiddhiAppCreationException(
"Query " + siddhiQueryContext.getName() + "'s per value expected a string but found "
+ perExpressionExecutor.getReturnType(),
per.getQueryContextStartIndex(), per.getQueryContextEndIndex());
}
// Additional Per time function verification at compile time if it is a constant
if (perExpressionExecutor instanceof ConstantExpressionExecutor) {
String perValue = ((ConstantExpressionExecutor) perExpressionExecutor).getValue().toString();
try {
normalizeDuration(perValue);
} catch (SiddhiAppValidationException e) {
throw new SiddhiAppValidationException(
"Aggregation Query's per value is expected to be of a valid time function of the " +
"following " + TimePeriod.Duration.SECONDS + ", " + TimePeriod.Duration.MINUTES
+ ", " + TimePeriod.Duration.HOURS + ", " + TimePeriod.Duration.DAYS + ", "
+ TimePeriod.Duration.MONTHS + ", " + TimePeriod.Duration.YEARS + ".");
}
}
} else {
throw new SiddhiAppCreationException("Syntax Error: Aggregation join query must contain a `per` " +
"definition for granularity");
}
// Create start and end time expression
Expression startEndTimeExpression;
ExpressionExecutor startTimeEndTimeExpressionExecutor;
if (within != null) {
if (within.getTimeRange().size() == 1) {
startEndTimeExpression = new AttributeFunction("incrementalAggregator",
"startTimeEndTime", within.getTimeRange().get(0));
} else { // within.getTimeRange().size() == 2
startEndTimeExpression = new AttributeFunction("incrementalAggregator",
"startTimeEndTime", within.getTimeRange().get(0), within.getTimeRange().get(1));
}
startTimeEndTimeExpressionExecutor = ExpressionParser.parseExpression(startEndTimeExpression,
matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
variableExpressionExecutors, false, 0,
ProcessingMode.BATCH, false, siddhiQueryContext);
} else {
throw new SiddhiAppCreationException("Syntax Error : Aggregation read query must contain a `within` " +
"definition for filtering of aggregation data.");
}
// Create within expression
Expression timeFilterExpression;
if (isProcessingOnExternalTime) {
timeFilterExpression = Expression.variable(AGG_EXTERNAL_TIMESTAMP_COL);
} else {
timeFilterExpression = Expression.variable(AGG_START_TIMESTAMP_COL);
}
Expression withinExpression;
Expression start = Expression.variable(additionalAttributes.get(0).getName());
Expression end = Expression.variable(additionalAttributes.get(1).getName());
Expression compareWithStartTime = Compare.compare(start, Compare.Operator.LESS_THAN_EQUAL,
timeFilterExpression);
Expression compareWithEndTime = Compare.compare(timeFilterExpression, Compare.Operator.LESS_THAN, end);
withinExpression = Expression.and(compareWithStartTime, compareWithEndTime);
List<ExpressionExecutor> timestampFilterExecutors = new ArrayList<>();
if (isDistributed) {
for (int i = 0; i < lowerGranularitySize; i++) {
Expression[] expressionArray = new Expression[]{
new AttributeFunction("", "currentTimeMillis", null),
Expression.value(this.incrementalDurations.get(i + 1).toString())};
Expression filterExpression = new AttributeFunction("incrementalAggregator",
"getAggregationStartTime", expressionArray);
timestampFilterExecutors.add(ExpressionParser.parseExpression(filterExpression,
matchingMetaInfoHolder.getMetaStateEvent(), matchingMetaInfoHolder.getCurrentState(), tableMap,
variableExpressionExecutors, false, 0,
ProcessingMode.BATCH, false, siddhiQueryContext));
}
}
// Create compile condition per each table used to persist aggregates.
// These compile conditions are used to check whether the aggregates in tables are within the given duration.
// Combine with and on condition for table query
boolean shouldApplyReducedCondition = false;
Expression reducedExpression = null;
//Check if there is no on conditions
if (!(expression instanceof BoolConstant)) {
AggregationExpressionBuilder aggregationExpressionBuilder = new AggregationExpressionBuilder(expression);
AggregationExpressionVisitor expressionVisitor = new AggregationExpressionVisitor(
metaStreamEventForTableLookups.getInputReferenceId(),
metaStreamEventForTableLookups.getLastInputDefinition().getAttributeList(),
this.tableAttributesNameList
);
aggregationExpressionBuilder.build(expressionVisitor);
shouldApplyReducedCondition = expressionVisitor.applyReducedExpression();
reducedExpression = expressionVisitor.getReducedExpression();
}
Expression withinExpressionTable;
if (shouldApplyReducedCondition) {
withinExpressionTable = Expression.and(withinExpression, reducedExpression);
} else {
withinExpressionTable = withinExpression;
}
Variable timestampVariable = new Variable(AGG_START_TIMESTAMP_COL);
List<String> queryGroupByNamesList = queryGroupByList.stream()
.map(Variable::getAttributeName)
.collect(Collectors.toList());
boolean queryGroupByContainsTimestamp = queryGroupByNamesList.remove(AGG_START_TIMESTAMP_COL);
boolean isQueryGroupBySame = queryGroupByList.isEmpty() ||
(queryGroupByList.contains(timestampVariable) && queryGroupByNamesList.equals(groupByVariablesList));
List<VariableExpressionExecutor> variableExpExecutorsForTableLookups = new ArrayList<>();
Map<TimePeriod.Duration, CompiledSelection> withinTableCompiledSelection = new HashMap<>();
if (isOptimisedTableLookup) {
Selector selector = Selector.selector();
List<Variable> groupByList = new ArrayList<>();
if (!isQueryGroupBySame) {
if (queryGroupByContainsTimestamp) {
if (isProcessingOnExternalTime) {
groupByList.add(new Variable(AGG_EXTERNAL_TIMESTAMP_COL));
} else {
groupByList.add(new Variable(AGG_START_TIMESTAMP_COL));
}
//Remove timestamp to process the rest
queryGroupByList.remove(timestampVariable);
}
for (Variable queryGroupBy : queryGroupByList) {
if (groupByVariablesList.contains(queryGroupBy.getAttributeName())) {
String referenceId = queryGroupBy.getStreamId();
if (aggReferenceId == null || aggReferenceId.equalsIgnoreCase(referenceId)) {
groupByList.add(queryGroupBy);
}
}
}
// If query group bys are based on joining stream
if (groupByList.isEmpty()) {
isQueryGroupBySame = true;
}
}
if (aggReferenceId != null) {
groupByList.forEach((groupBy) -> groupBy.setStreamId(aggReferenceId));
}
selector.addGroupByList(groupByList);
List<OutputAttribute> selectorList;
if (!isQueryGroupBySame) {
selectorList = constructSelectorList(isProcessingOnExternalTime, isDistributed, isLatestEventColAdded,
baseAggregatorBeginIndex, groupByVariablesList.size(), finalBaseExpressionsList,
tableDefinition, groupByList);
} else {
selectorList = defaultSelectorList;
}
if (aggReferenceId != null) {
for (OutputAttribute outputAttribute : selectorList) {
if (outputAttribute.getExpression() instanceof Variable) {
((Variable) outputAttribute.getExpression()).setStreamId(aggReferenceId);
} else {
for (Expression parameter :
((AttributeFunction) outputAttribute.getExpression()).getParameters()) {
((Variable) parameter).setStreamId(aggReferenceId);
}
}
}
}
selector.addSelectionList(selectorList);
try {
aggregationTables.entrySet().forEach(
(durationTableEntry -> {
CompiledSelection compiledSelection = ((QueryableProcessor) durationTableEntry.getValue())
.compileSelection(
selector, tableDefinition.getAttributeList(), metaInfoHolderForTableLookups,
variableExpExecutorsForTableLookups, tableMap, siddhiQueryContext
);
withinTableCompiledSelection.put(durationTableEntry.getKey(), compiledSelection);
})
);
} catch (SiddhiAppCreationException | QueryableRecordTableException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Aggregation Query optimization failed for aggregation: '" + aggregationName + "'. " +
"Creating table lookup query in normal mode. Reason for failure: " + e.getMessage(), e);
}
isOptimisedTableLookup = false;
}
}
for (Map.Entry<TimePeriod.Duration, Table> entry : aggregationTables.entrySet()) {
CompiledCondition withinTableCompileCondition = entry.getValue().compileCondition(withinExpressionTable,
metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap,
siddhiQueryContext);
withinTableCompiledConditions.put(entry.getKey(), withinTableCompileCondition);
}
// Create compile condition for in-memory data.
// This compile condition is used to check whether the running aggregates (in-memory data)
// are within given duration
withinInMemoryCompileCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true),
withinExpression, metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap,
siddhiQueryContext);
// Create compile condition for in-memory data, in case of distributed
// Look at the lower level granularities
Map<TimePeriod.Duration, CompiledCondition> withinTableLowerGranularityCompileCondition = new HashMap<>();
Expression lowerGranularity;
if (isDistributed) {
for (int i = 0; i < lowerGranularitySize; i++) {
if (isProcessingOnExternalTime) {
lowerGranularity = Expression.and(
Expression.compare(
Expression.variable("AGG_TIMESTAMP"),
Compare.Operator.GREATER_THAN_EQUAL,
Expression.variable(lowerGranularityAttributes.get(i))),
withinExpressionTable
);
} else {
if (shouldApplyReducedCondition) {
lowerGranularity = Expression.and(
Expression.compare(
Expression.variable("AGG_TIMESTAMP"),
Compare.Operator.GREATER_THAN_EQUAL,
Expression.variable(lowerGranularityAttributes.get(i))),
reducedExpression
);
} else {
lowerGranularity =
Expression.compare(
Expression.variable("AGG_TIMESTAMP"),
Compare.Operator.GREATER_THAN_EQUAL,
Expression.variable(lowerGranularityAttributes.get(i)));
}
}
TimePeriod.Duration duration = this.incrementalDurations.get(i);
String tableName = aggregationName + "_" + duration.toString();
CompiledCondition compiledCondition = tableMap.get(tableName).compileCondition(lowerGranularity,
metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups, tableMap,
siddhiQueryContext);
withinTableLowerGranularityCompileCondition.put(duration, compiledCondition);
}
}
QueryParserHelper.reduceMetaComplexEvent(metaInfoHolderForTableLookups.getMetaStateEvent());
// On compile condition.
// After finding all the aggregates belonging to within duration, the final on condition (given as
// "on stream1.name == aggregator.nickName ..." in the join query) must be executed on that data.
// This condition is used for that purpose.
onCompiledCondition = OperatorParser.constructOperator(new ComplexEventChunk<>(true), expression,
matchingMetaInfoHolder, variableExpressionExecutors, tableMap, siddhiQueryContext);
return new IncrementalAggregateCompileCondition(aggregationName, isProcessingOnExternalTime, isDistributed,
incrementalDurations, aggregationTables, outputExpressionExecutors,
isOptimisedTableLookup, withinTableCompiledSelection, withinTableCompiledConditions,
withinInMemoryCompileCondition, withinTableLowerGranularityCompileCondition, onCompiledCondition,
additionalAttributes, perExpressionExecutor, startTimeEndTimeExpressionExecutor,
timestampFilterExecutors, aggregateMetaSteamEvent, matchingMetaInfoHolder,
metaInfoHolderForTableLookups, variableExpExecutorsForTableLookups);
}
private static List<OutputAttribute> constructSelectorList(boolean isProcessingOnExternalTime,
boolean isDistributed,
boolean isLatestEventColAdded,
int baseAggregatorBeginIndex,
int numGroupByVariables,
List<Expression> finalBaseExpressions,
AbstractDefinition incomingOutputStreamDefinition,
List<Variable> newGroupByList) {
List<OutputAttribute> selectorList = new ArrayList<>();
List<Attribute> attributeList = incomingOutputStreamDefinition.getAttributeList();
List<String> queryGroupByNames = newGroupByList.stream()
.map(Variable::getAttributeName).collect(Collectors.toList());
Variable maxVariable;
if (!isProcessingOnExternalTime) {
maxVariable = new Variable(AGG_START_TIMESTAMP_COL);
} else if (isLatestEventColAdded) {
maxVariable = new Variable(AGG_LAST_TIMESTAMP_COL);
} else {
maxVariable = new Variable(AGG_EXTERNAL_TIMESTAMP_COL);
}
int i = 0;
//Add timestamp selector
OutputAttribute timestampAttribute;
if (!isProcessingOnExternalTime && queryGroupByNames.contains(AGG_START_TIMESTAMP_COL)) {
timestampAttribute = new OutputAttribute(new Variable(AGG_START_TIMESTAMP_COL));
} else {
timestampAttribute = new OutputAttribute(attributeList.get(i).getName(),
Expression.function("max", new Variable(AGG_START_TIMESTAMP_COL)));
}
selectorList.add(timestampAttribute);
i++;
if (isDistributed) {
selectorList.add(new OutputAttribute(AGG_SHARD_ID_COL, Expression.function("max",
new Variable(AGG_SHARD_ID_COL))));
i++;
}
if (isProcessingOnExternalTime) {
OutputAttribute externalTimestampAttribute;
if (queryGroupByNames.contains(AGG_START_TIMESTAMP_COL)) {
externalTimestampAttribute = new OutputAttribute(new Variable(AGG_EXTERNAL_TIMESTAMP_COL));
} else {
externalTimestampAttribute = new OutputAttribute(attributeList.get(i).getName(),
Expression.function("max", new Variable(AGG_EXTERNAL_TIMESTAMP_COL)));
}
selectorList.add(externalTimestampAttribute);
i++;
}
for (int j = 0; j < numGroupByVariables; j++) {
OutputAttribute groupByAttribute;
Variable variable = new Variable(attributeList.get(i).getName());
if (queryGroupByNames.contains(variable.getAttributeName())) {
groupByAttribute = new OutputAttribute(variable);
} else {
groupByAttribute = new OutputAttribute(variable.getAttributeName(),
Expression.function("max", new Variable(variable.getAttributeName())));
}
selectorList.add(groupByAttribute);
i++;
}
if (isLatestEventColAdded) {
baseAggregatorBeginIndex = baseAggregatorBeginIndex - 1;
}
for (; i < baseAggregatorBeginIndex; i++) {
OutputAttribute outputAttribute = new OutputAttribute(attributeList.get(i).getName(),
Expression.function("incrementalAggregator", "last",
new Variable(attributeList.get(i).getName()), maxVariable));
selectorList.add(outputAttribute);
}
if (isLatestEventColAdded) {
OutputAttribute lastTimestampAttribute = new OutputAttribute(AGG_LAST_TIMESTAMP_COL,
Expression.function("max", new Variable(AGG_LAST_TIMESTAMP_COL)));
selectorList.add(lastTimestampAttribute);
i++;
}
for (Expression finalBaseExpression : finalBaseExpressions) {
OutputAttribute outputAttribute = new OutputAttribute(attributeList.get(i).getName(), finalBaseExpression);
selectorList.add(outputAttribute);
i++;
}
return selectorList;
}
public void startPurging() {
incrementalDataPurger.executeIncrementalDataPurging();
}
public void initialiseExecutors(boolean isFirstEventArrived) {
// State only updated when first event arrives to IncrementalAggregationProcessor
if (isFirstEventArrived) {
this.isFirstEventArrived = true;
for (Map.Entry<TimePeriod.Duration, IncrementalExecutor> durationIncrementalExecutorEntry :
this.incrementalExecutorMap.entrySet()) {
durationIncrementalExecutorEntry.getValue().setProcessingExecutor(true);
}
}
this.incrementalExecutorsInitialiser.initialiseExecutors();
}
public void processEvents(ComplexEventChunk<StreamEvent> streamEventComplexEventChunk) {
incrementalExecutorMap.get(incrementalDurations.get(0)).execute(streamEventComplexEventChunk);
}
}
|
Add missed import statement
|
modules/siddhi-core/src/main/java/io/siddhi/core/aggregation/AggregationRuntime.java
|
Add missed import statement
|
|
Java
|
apache-2.0
|
ca697d9a8d3305b25395dae0f97fc3d64c0aa44c
| 0
|
mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.persistence.query;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.usergrid.CoreApplication;
import org.apache.usergrid.CoreITSetup;
import org.apache.usergrid.CoreITSetupImpl;
import org.apache.usergrid.persistence.Results;
import org.apache.usergrid.persistence.Query;
import static org.junit.Assert.assertEquals;
/**
 * Tests sub entities in full results
*/
public class NotSubPropertyIT {
    private static final Logger LOG = LoggerFactory.getLogger( NotSubPropertyIT.class );
private static final String notQuery = "select * where NOT subArray.usageType = 'true' order by created asc";
private static final int PAGE_SIZE = 300;
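    // Illustrative note (not part of the original test source): given the data
    // written in performSetup(), subArray.usageType is "true" for even-indexed
    // entities and "false" for odd-indexed ones, so notQuery is expected to
    // return exactly the odd-indexed entities collected in the "expected" list,
    // ordered by creation time.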
@ClassRule
public static CoreITSetup setup = new CoreITSetupImpl( );
@Rule
public CoreApplication app = new CoreApplication( setup );
@Test
public void testNotPagingCollection() throws Exception {
final CollectionIoHelper collectionIoHelper = new CollectionIoHelper( app );
List<UUID> expected = performSetup( collectionIoHelper );
testSubPropertySearching( collectionIoHelper, notQuery, expected );
}
@Test
public void testNotPagingConnection() throws Exception {
final ConnectionHelper connectionHelper = new ConnectionHelper( app );
List<UUID> expected = performSetup( connectionHelper );
testSubPropertySearching( connectionHelper, notQuery, expected );
}
/**
* Perform the writes
*/
private List<UUID> performSetup( final IoHelper io ) throws Exception {
io.doSetup();
int size = 200;
long start = System.currentTimeMillis();
LOG.info( "Writing {} entities.", size );
List<UUID> expected = new ArrayList<UUID>( size );
for ( int i = 0; i < size; i++ ) {
Map<String, Object> entity = new HashMap<String, Object>();
final boolean usageTypeBool = i % 2 == 0;
final String usageType = String.valueOf( usageTypeBool );
List<Map<String, Object>> subArray = new ArrayList<Map<String, Object>>();
for ( int j = 0; j < 2; j++ ) {
Map<String, Object> subFields = new HashMap<String, Object>();
subFields.put( "startDate", 10000 );
subFields.put( "endDate", 20000 );
subFields.put( "usageType", usageType );
subArray.add( subFields );
}
entity.put( "subArray", subArray );
UUID entityId = io.writeEntity( entity ).getUuid();
if ( !usageTypeBool ) {
expected.add( entityId );
}
}
long stop = System.currentTimeMillis();
LOG.info( "Writes took {} ms", stop - start );
app.refreshIndex();
return expected;
}
private void testSubPropertySearching( final IoHelper io, final String queryString,
final List<UUID> expectedResults ) throws Exception {
//our field1Or has a result size < our page size, so it shouldn't blow up when the cursor is getting created
        //the leaf iterator should insert its own "no value left" into the cursor
Query query = Query.fromQL( queryString );
query.setLimit( PAGE_SIZE );
Results results;
long start = System.currentTimeMillis();
int expectedIndex = 0;
do {
// now do simple ordering, should be returned in order
results = io.getResults( query );
for ( int i = 0; i < results.size(); i++, expectedIndex++ ) {
final UUID returned = results.getEntities().get( i ).getUuid();
final UUID expected = expectedResults.get( expectedIndex );
assertEquals( "Not returned as excpected", expected, returned );
}
query.setOffsetFromCursor( results.getCursor() );
}
while ( results.getCursor() != null );
long stop = System.currentTimeMillis();
LOG.info( "Query took {} ms to return {} entities", stop - start, expectedResults.size() );
assertEquals( "All names returned", expectedResults.size(), expectedIndex );
}
}
|
stack/core/src/test/java/org/apache/usergrid/persistence/query/NotSubPropertyIT.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.persistence.query;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.usergrid.CoreApplication;
import org.apache.usergrid.CoreITSetup;
import org.apache.usergrid.CoreITSetupImpl;
import org.apache.usergrid.persistence.Results;
import org.apache.usergrid.persistence.Query;
import static org.junit.Assert.assertEquals;
/**
 * Tests sub entities in full results
*/
public class NotSubPropertyIT {
    private static final Logger LOG = LoggerFactory.getLogger( NotSubPropertyIT.class );
private static final String notQuery = "select * where NOT subArray.usageType = 'true1'";
private static final int PAGE_SIZE = 300;
@ClassRule
public static CoreITSetup setup = new CoreITSetupImpl( );
@Rule
public CoreApplication app = new CoreApplication( setup );
@Test
public void testNotPagingCollection() throws Exception {
final CollectionIoHelper collectionIoHelper = new CollectionIoHelper( app );
List<UUID> expected = performSetup( collectionIoHelper );
testSubPropertySearching( collectionIoHelper, notQuery, expected );
}
@Test
public void testNotPagingConnection() throws Exception {
final ConnectionHelper connectionHelper = new ConnectionHelper( app );
List<UUID> expected = performSetup( connectionHelper );
testSubPropertySearching( connectionHelper, notQuery, expected );
}
/**
* Perform the writes
*/
private List<UUID> performSetup( final IoHelper io ) throws Exception {
io.doSetup();
int size = 200;
long start = System.currentTimeMillis();
LOG.info( "Writing {} entities.", size );
List<UUID> expected = new ArrayList<UUID>( size );
for ( int i = 0; i < size; i++ ) {
Map<String, Object> entity = new HashMap<String, Object>();
final boolean usageTypeBool = i % 2 == 0;
final String usageType = String.valueOf( usageTypeBool );
List<Map<String, Object>> subArray = new ArrayList<Map<String, Object>>();
for ( int j = 0; j < 2; j++ ) {
Map<String, Object> subFields = new HashMap<String, Object>();
subFields.put( "startDate", 10000 );
subFields.put( "endDate", 20000 );
subFields.put( "usageType", usageType + j );
subArray.add( subFields );
}
entity.put( "subArray", subArray );
UUID entityId = io.writeEntity( entity ).getUuid();
if ( !usageTypeBool ) {
expected.add( entityId );
}
}
long stop = System.currentTimeMillis();
LOG.info( "Writes took {} ms", stop - start );
return expected;
}
private void testSubPropertySearching( final IoHelper io, final String queryString,
final List<UUID> expectedResults ) throws Exception {
//our field1Or has a result size < our page size, so it shouldn't blow up when the cursor is getting created
        //the leaf iterator should insert its own "no value left" into the cursor
Query query = Query.fromQL( queryString );
query.setLimit( PAGE_SIZE );
Results results;
long start = System.currentTimeMillis();
int expectedIndex = 0;
do {
// now do simple ordering, should be returned in order
results = io.getResults( query );
for ( int i = 0; i < results.size(); i++, expectedIndex++ ) {
final UUID returned = results.getEntities().get( i ).getUuid();
final UUID expected = expectedResults.get( expectedIndex );
assertEquals( "Not returned as excpected", expected, returned );
}
query.setOffsetFromCursor( results.getCursor() );
}
while ( results.getCursor() != null );
long stop = System.currentTimeMillis();
LOG.info( "Query took {} ms to return {} entities", stop - start, expectedResults.size() );
assertEquals( "All names returned", expectedResults.size(), expectedIndex );
}
}
|
Fixed NotSubPropertyIT
|
stack/core/src/test/java/org/apache/usergrid/persistence/query/NotSubPropertyIT.java
|
Fixed NotSubPropertyIT
|
|
Java
|
apache-2.0
|
642f8e43ec8f0415926f16909cddc97072255f4c
| 0
|
YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion
|
/**
* @copyright
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
* @endcopyright
*/
package org.apache.subversion.javahl.types;
/**
* Object that describes a revision range
*/
public class RevisionRange implements Comparable<RevisionRange>, java.io.Serializable
{
    // Update the serialVersionUID when there is an incompatible change
// made to this class. See any of the following, depending upon
// the Java release.
// http://java.sun.com/j2se/1.3/docs/guide/serialization/spec/version.doc7.html
// http://java.sun.com/j2se/1.4/pdf/serial-spec.pdf
// http://java.sun.com/j2se/1.5.0/docs/guide/serialization/spec/version.html#6678
// http://java.sun.com/javase/6/docs/platform/serialization/spec/version.html#6678
private static final long serialVersionUID = 2L;
private Revision from;
private Revision to;
private boolean inheritable;
/**
* Creates a new instance. Called by native library.
*/
    protected RevisionRange(long from, long to, boolean inheritable)
{
this.from = Revision.getInstance(from);
this.to = Revision.getInstance(to);
this.inheritable = inheritable;
}
/** @since 1.9 */
    public RevisionRange(Revision from, Revision to, boolean inheritable)
{
this.from = from;
this.to = to;
this.inheritable = inheritable;
}
public RevisionRange(Revision from, Revision to)
{
this.from = from;
this.to = to;
this.inheritable = true;
}
/**
     * Accepts a string in one of these forms: "n" or "m-n". A trailing '*'
     * marks the range as non-inheritable. Parses the result into a from and
     * to revision.
* @param revisionElement revision range or single revision
*/
public RevisionRange(String revisionElement)
{
super();
if (revisionElement == null)
{
return;
}
this.inheritable = !revisionElement.endsWith("*");
if (!this.inheritable)
revisionElement =
revisionElement.substring(0, revisionElement.length() - 1);
int hyphen = revisionElement.indexOf('-');
if (hyphen > 0)
{
try
{
long fromRev = Long
.parseLong(revisionElement.substring(0, hyphen));
long toRev = Long.parseLong(revisionElement
.substring(hyphen + 1));
this.from = new Revision.Number(fromRev);
this.to = new Revision.Number(toRev);
}
catch (NumberFormatException e)
{
return;
}
}
else
{
try
{
long revNum = Long.parseLong(revisionElement.trim());
this.from = new Revision.Number(revNum);
this.to = this.from;
}
catch (NumberFormatException e)
{
return;
}
}
}
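    // Illustrative sketch (not part of the original source), showing the string
    // forms handled by the constructor above:
    //   new RevisionRange("42")      -> from = 42, to = 42, inheritable = true
    //   new RevisionRange("10-20")   -> from = 10, to = 20, inheritable = true
    //   new RevisionRange("10-20*")  -> from = 10, to = 20, inheritable = false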
public Revision getFromRevision()
{
return from;
}
public Revision getToRevision()
{
return to;
}
public boolean isInheritable()
{
return inheritable;
}
public String toString()
{
if (from != null && to != null)
{
String rep = (from.equals(to) ? from.toString()
: from.toString() + '-' + to.toString());
if (!inheritable)
return rep + '*';
return rep;
}
return super.toString();
}
public static Long getRevisionAsLong(Revision rev)
{
long val = 0;
if (rev != null && rev instanceof Revision.Number)
{
val = ((Revision.Number) rev).getNumber();
}
return new Long(val);
}
public int hashCode()
{
final int prime = 31;
int result = (inheritable ? 1 : 2);
result = prime * result + ((from == null) ? 0 : from.hashCode());
result = prime * result + ((to == null) ? 0 : to.hashCode());
return result;
}
/**
* @param range The RevisionRange to compare this object to.
*/
public boolean equals(Object range)
{
if (this == range)
return true;
if (!super.equals(range))
return false;
if (getClass() != range.getClass())
return false;
final RevisionRange other = (RevisionRange) range;
if (from == null)
{
if (other.from != null)
return false;
}
else if (!from.equals(other.from))
{
return false;
}
if (to == null)
{
if (other.to != null)
return false;
}
else if (!to.equals(other.to))
{
return false;
}
return (inheritable == other.inheritable);
}
/**
* <b>Note:</b> Explicitly ignores inheritable state.
*
* @param range The RevisionRange to compare this object to.
*/
public int compareTo(RevisionRange range)
{
if (this == range)
return 0;
Revision other = (range).getFromRevision();
return RevisionRange.getRevisionAsLong(this.getFromRevision())
.compareTo(RevisionRange.getRevisionAsLong(other));
}
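    // For illustration only (not in the original source): compareTo() orders
    // ranges by their from-revision alone, so ranges that differ only in their
    // to-revision or inheritable flag compare as equal, e.g.
    //   new RevisionRange("5-10").compareTo(new RevisionRange("5-20*")) == 0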
}
|
subversion/bindings/javahl/src/org/apache/subversion/javahl/types/RevisionRange.java
|
/**
* @copyright
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
* @endcopyright
*/
package org.apache.subversion.javahl.types;
/**
* Object that describes a revision range
*/
public class RevisionRange implements Comparable<RevisionRange>, java.io.Serializable
{
    // Update the serialVersionUID when there is an incompatible change
// made to this class. See any of the following, depending upon
// the Java release.
// http://java.sun.com/j2se/1.3/docs/guide/serialization/spec/version.doc7.html
// http://java.sun.com/j2se/1.4/pdf/serial-spec.pdf
// http://java.sun.com/j2se/1.5.0/docs/guide/serialization/spec/version.html#6678
// http://java.sun.com/javase/6/docs/platform/serialization/spec/version.html#6678
private static final long serialVersionUID = 2L;
private Revision from;
private Revision to;
private boolean inheritable;
/**
* Creates a new instance. Called by native library.
*/
    protected RevisionRange(long from, long to, boolean inheritable)
{
this.from = Revision.getInstance(from);
this.to = Revision.getInstance(to);
this.inheritable = inheritable;
}
    public RevisionRange(Revision from, Revision to, boolean inheritable)
{
this.from = from;
this.to = to;
this.inheritable = inheritable;
}
public RevisionRange(Revision from, Revision to)
{
this.from = from;
this.to = to;
this.inheritable = true;
}
/**
     * Accepts a string in one of these forms: "n" or "m-n". A trailing '*'
     * marks the range as non-inheritable. Parses the result into a from and
     * to revision.
* @param revisionElement revision range or single revision
*/
public RevisionRange(String revisionElement)
{
super();
if (revisionElement == null)
{
return;
}
this.inheritable = !revisionElement.endsWith("*");
if (!this.inheritable)
revisionElement =
revisionElement.substring(0, revisionElement.length() - 1);
int hyphen = revisionElement.indexOf('-');
if (hyphen > 0)
{
try
{
long fromRev = Long
.parseLong(revisionElement.substring(0, hyphen));
long toRev = Long.parseLong(revisionElement
.substring(hyphen + 1));
this.from = new Revision.Number(fromRev);
this.to = new Revision.Number(toRev);
}
catch (NumberFormatException e)
{
return;
}
}
else
{
try
{
long revNum = Long.parseLong(revisionElement.trim());
this.from = new Revision.Number(revNum);
this.to = this.from;
}
catch (NumberFormatException e)
{
return;
}
}
}
public Revision getFromRevision()
{
return from;
}
public Revision getToRevision()
{
return to;
}
public boolean isInheritable()
{
return inheritable;
}
public String toString()
{
if (from != null && to != null)
{
String rep = (from.equals(to) ? from.toString()
: from.toString() + '-' + to.toString());
if (!inheritable)
return rep + '*';
return rep;
}
return super.toString();
}
public static Long getRevisionAsLong(Revision rev)
{
long val = 0;
if (rev != null && rev instanceof Revision.Number)
{
val = ((Revision.Number) rev).getNumber();
}
return new Long(val);
}
public int hashCode()
{
final int prime = 31;
int result = (inheritable ? 1 : 2);
result = prime * result + ((from == null) ? 0 : from.hashCode());
result = prime * result + ((to == null) ? 0 : to.hashCode());
return result;
}
/**
* @param range The RevisionRange to compare this object to.
*/
public boolean equals(Object range)
{
if (this == range)
return true;
if (!super.equals(range))
return false;
if (getClass() != range.getClass())
return false;
final RevisionRange other = (RevisionRange) range;
if (from == null)
{
if (other.from != null)
return false;
}
else if (!from.equals(other.from))
{
return false;
}
if (to == null)
{
if (other.to != null)
return false;
}
else if (!to.equals(other.to))
{
return false;
}
return (inheritable == other.inheritable);
}
/**
* @param range The RevisionRange to compare this object to.
*/
public int compareTo(RevisionRange range)
{
if (this == range)
return 0;
Revision other = (range).getFromRevision();
return RevisionRange.getRevisionAsLong(this.getFromRevision())
.compareTo(RevisionRange.getRevisionAsLong(other));
// NOTE: Explicitly ignores inheritable state.
}
}
|
* subversion/bindings/javahl/src/org/apache/subversion/javahl/types/RevisionRange.java:
(RevisionRange.RevisionRange, RevisionRange.compareTo): Update docstrings.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@1499364 13f79535-47bb-0310-9956-ffa450edef68
|
subversion/bindings/javahl/src/org/apache/subversion/javahl/types/RevisionRange.java
|
* subversion/bindings/javahl/src/org/apache/subversion/javahl/types/RevisionRange.java: (RevisionRange.RevisionRange, RevisionRange.compareTo): Update docstrings.
|
|
Java
|
apache-2.0
|
634e1d2baad1515089be0ee80678a8c23830b10c
| 0
|
rodsol/relex,rodsol/relex,williampma/relex,leungmanhin/relex,AmeBel/relex,ainishdave/relex,virneo/relex,linas/relex,opencog/relex,opencog/relex,williampma/relex,virneo/relex,williampma/relex,virneo/relex,AmeBel/relex,opencog/relex,ainishdave/relex,ainishdave/relex,ainishdave/relex,rodsol/relex,AmeBel/relex,virneo/relex,linas/relex,leungmanhin/relex,rodsol/relex,williampma/relex,leungmanhin/relex,linas/relex
|
/*
* Copyright 2009 Linas Vepstas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package relex.test;
import java.util.ArrayList;
import java.util.Collections;
import relex.ParsedSentence;
import relex.RelationExtractor;
import relex.Sentence;
import relex.output.SimpleView;
public class TestRelEx
{
private static RelationExtractor re;
private int pass;
private int fail;
private int subpass;
private int subfail;
private static ArrayList<String> sentfail= new ArrayList<String>();
// @BeforeClass
public static void setUpClass() {
re = new RelationExtractor();
}
public TestRelEx()
{
pass = 0;
fail = 0;
subpass = 0;
subfail = 0;
}
public ArrayList<String> split(String a)
{
String[] sa = a.split("\n");
ArrayList<String> saa = new ArrayList<String>();
for (String s : sa) {
saa.add(s);
}
Collections.sort (saa);
return saa;
}
/**
* First argument is the sentence.
* Second argument is a list of the relations that RelEx
* should be generating.
* Return true if RelEx generates the same dependencies
* as the second argument.
*/
public boolean test_sentence (String sent, String sf)
{
re.do_penn_tagging = false;
re.setMaxParses(1);
Sentence sntc = re.processSentence(sent);
ParsedSentence parse = sntc.getParses().get(0);
String rs = SimpleView.printBinaryRelations(parse);
String urs = SimpleView.printUnaryRelations(parse);
ArrayList<String> exp = split(sf);
ArrayList<String> brgot = split(rs);
ArrayList<String> urgot = split(urs);
		// start the count of returned relationships with the number of
		// binary relations in the parser output
		int sizeOfGotRelations = brgot.size();
		// check expected binary and unary relations:
		// the for-loop below checks whether all expected binary relations are
		// contained in the parser binary-relation output list "brgot";
		// if any unary relations are expected, it also checks the parser
		// unary-relation output list "urgot" for them
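		// For example (illustrative; the relation names here are hypothetical):
		// if exp = {_subj(eat, Ben), _obj(eat, cookie), tense(eat, past)} and
		// tense(...) is a unary relation, the first two must appear in "brgot"
		// and the third in "urgot" for this check to pass.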
for (int i=0; i< exp.size(); i++)
{
if(!brgot.contains(exp.get(i)))
{
if(!urgot.contains(exp.get(i)))
{
System.err.println("Error: content miscompare:\n" +
"\tExpected = " + exp + "\n" +
"\tGot Binary Relations = " + brgot + "\n" +
"\tGot Unary Relations = " + urgot + "\n" +
"\tSentence = " + sent);
subfail ++;
fail ++;
sentfail.add(sent);
return false;
}
				// add the unary relation count to the total number of
				// binary relations
sizeOfGotRelations++;
}
}
		// The size check of expected relationships vs. returned relationships
		// is done here purposefully, to accommodate any unary relationships
		// present in the expected output (see the for-loop above). It only
		// checks whether the parser output produced more relationships
		// (binary + unary) than expected; if the parser output produced fewer
		// relationships than expected, that is caught by the for-loop above.
if (exp.size() < sizeOfGotRelations)
{
System.err.println("Error: size miscompare:\n" +
"\tExpected = " + exp + "\n" +
"\tGot Binary Relations = " + brgot + "\n" +
"\tGot Unary Relations = " + urgot + "\n" +
"\tSentence = " + sent);
subfail ++;
fail ++;
sentfail.add(sent);
return false;
}
subpass ++;
pass ++;
return true;
}
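	// Minimal usage sketch (illustrative; the real test methods below follow
	// the same pattern):
	//   rc &= test_sentence ("Ben ate my cookie.",
	//       "_subj(eat, Ben)\n" +
	//       "_obj(eat, cookie)\n" +
	//       "_poss(cookie, me)\n");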
public void report(boolean rc, String subsys)
{
if (rc) {
System.err.println(subsys + ": Tested " + pass +
" sentences, test passed OK");
} else {
System.err.println(subsys + ": Test failed\n\t" +
fail + " sentences failed\n\t" +
pass + " sentences passed");
}
subpass = 0;
subfail = 0;
}
public boolean test_determiners()
{
boolean rc = true;
rc &= test_sentence ("Ben ate my cookie.",
"_subj(eat, Ben)\n" +
"_obj(eat, cookie)\n" +
"_poss(cookie, me)\n");
rc &= test_sentence ("Ben ate that cookie.",
"_subj(eat, Ben)\n" +
"_obj(eat, cookie)\n" +
"_det(cookie, that)\n");
rc &= test_sentence ("All my writings are bad.",
"_predet(writings, all)\n" +
"_poss(writings, me)\n" +
"_predadj(writings, bad)\n");
rc &= test_sentence ("All his designs are bad.",
"_predet(design, all)\n" +
"_poss(design, him)\n" +
"_predadj(design, bad)\n");
rc &= test_sentence ("All the boys knew it.",
"_subj(know, boy)\n" +
"_obj(know, it)\n" +
"_predet(boy, all)\n");
rc &= test_sentence ("Joan thanked Susan for all the help she had given.",
"for(thank, help)\n" +
"_subj(thank, Joan)\n" +
"_obj(thank, Susan)\n" +
"_predet(help, all)\n" +
"_subj(give, she)\n" +
"_obj(give, help)\n");
report(rc, "Determiners");
return rc;
}
public boolean test_time()
{
boolean rc = true;
rc &= test_sentence("I had breakfast at 8 am.",
"_obj(have, breakfast)\n"+
"at(have, am)\n" +
"_subj(have, I)\n" +
"_time(am, 8)\n");
rc &= test_sentence("I had supper before 6 pm.",
"_obj(have, supper)\n" +
"before(have, pm)\n" +
"_subj(have, I)\n" +
"_time(pm, 6)\n");
report(rc, "Time");
return rc;
}
public boolean test_comparatives()
{
boolean rc = true;
rc &= test_sentence ("Some people like pigs less than dogs.",
"_advmod(like, less)\n" +
"_obj(like, pig)\n" +
"_quantity(people, some)\n" +
"_subj(like, people)\n" +
"than(pig, dog)\n");
rc &= test_sentence ("Some people like pigs more than dogs.",
"_advmod(like, more)\n" +
"_obj(like, pig)\n" +
"_quantity(people, some)\n" +
"_subj(like, people)\n" +
"than(pig, dog)\n");
// Non-equal gradable: two entities, one feature ("more/less")
rc &= test_sentence ("He is more intelligent than John.",
"than(he, John)\n" +
"_comparative(intelligent, he)\n" +
"degree(intelligent, comparative)\n"+
"_advmod(intelligent, more)\n"+
"_predadj(he, intelligent)\n");
rc &= test_sentence ("He is less intelligent than John.",
"than(he, John)\n" +
"_comparative(intelligent, he)\n" +
"degree(intelligent, comparative)\n"+
"_advmod(intelligent, less)\n"+
"_predadj(he, intelligent)\n");
rc &= test_sentence ("He runs more quickly than John.",
"_advmod(run, quickly)\n"+
"_advmod(quickly, more)\n"+
"_subj(run, he)\n" +
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs less quickly than John.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_advmod(quickly, less)\n"+
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs more quickly than John does.",
"_advmod(run, quickly)\n" +
"_advmod(quickly, more)\n"+
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
// This sentence is ungrammatical but commonly used by
// non-native English speakers
rc &= test_sentence ("He runs less quickly than John does.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_advmod(quickly, less)\n"+
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs slower than John does.",
"_advmod(run, slow)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"than(he, John)\n" +
"_comparative(slow, run)\n" +
"degree(slow, comparative)\n");
rc &= test_sentence ("He runs more than John.",
"_obj(run, more)\n" +
"_subj(run, he)\n" +
"than(he, John)\n"+
"_comparative(more, run)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He runs less than John.",
"_obj(run, less)\n" +
"_subj(run, he)\n" +
"than(he, John)\n"+
"_comparative(less, run)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("He runs faster than John.",
"than(he, John)\n" +
"_comparative(fast, run)\n" +
"_subj(run, he)\n"+
"_advmod(run, fast)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("He runs more slowly than John.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_advmod(slowly, more)\n"+
"_comparative(slowly, run)\n"+
"_advmod(run, slowly)\n"+
"degree(slowly, comparative)\n");
rc &= test_sentence ("He runs less slowly than John.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_comparative(slowly, run)\n"+
"_advmod(run, slowly)\n"+
"_advmod(slowly, less)\n"+
"degree(slowly, comparative)\n");
rc &= test_sentence ("He runs more miles than John does.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_obj(run, mile)\n"+
"_comparative(mile, run)\n"+
"_quantity(mile, more)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He runs fewer miles than John does.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_obj(run, mile)\n"+
"_comparative(mile, run)\n"+
"_quantity(mile, fewer)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("He runs many more miles than John does.",
"than(he, John)\n" +
"_comparative(mile, run)\n"+
"_obj(run, mile)\n"+
"_subj(run, he)\n" +
"_subj(do, John)\n" +
"_quantity(mile, many)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He runs ten more miles than John.",
"_obj(run, mile)\n"+
"_subj(run, he)\n" +
"than(he, John)\n" +
"_comparative(mile, run)\n"+
"_quantity(mile, ten)\n" +
"numeric-FLAG(ten, T)\n" +
"degree(more, comparative)\n");
rc &= test_sentence ("He runs almost ten more miles than John does.",
"_obj(run, mile)\n"+
"_subj(run, he)\n"+
"_comparative(mile, run)\n"+
"_subj(do, John)\n"+
"than(he, John)\n"+
"_quantity_mod(ten, almost)\n"+
"_quantity(mile, ten)\n"+
"numeric-FLAG(ten, T)\n" +
"degree(more, comparative)\n");
rc &= test_sentence ("He runs more often than John.",
"_subj(run, he)\n"+
"_advmod(often, more)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He runs less often than John.",
"_subj(run, he)\n"+
"_advmod(often, less)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He runs here more often than John.",
"_advmod(run, here)\n"+
"_advmod(often, more)\n"+
"_advmod(run, often)\n"+
"_subj(run, he)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He runs here less often than John.",
"_advmod(run, here)\n"+
"_advmod(often, less)\n"+
"_advmod(run, often)\n"+
"_subj(run, he)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He is faster than John.",
"than(he, John)\n"+
"_predadj(he, fast)\n"+
"_comparative(fast, he)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("He is faster than John is.",
"than(he, John)\n"+
"_predadj(he, fast)\n"+
"_subj(be, John)\n"+
"_comparative(fast, he)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("His speed is faster than John's.",
"than(speed, be)\n"+
"_predadj(speed, fast)\n"+
"_poss(speed, him)\n"+
"_comparative(fast, speed)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("I run more than Ben.",
"_subj(run, I)\n"+
"_obj(run, more)\n"+
"_comparative(more, run)\n"+
"than(I, Ben)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I run less than Ben.",
"_subj(run, I)\n"+
"_obj(run, less)\n"+
"_comparative(less, run)\n"+
"than(I, Ben)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("I run more miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, more)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I run fewer miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, fewer)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("I run 10 more miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, 10)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"numeric-FLAG(10, T)\n" +
"degree(more, comparative)\n");
rc &= test_sentence ("I run 10 fewer miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, 10)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"numeric-FLAG(10, T)\n" +
"degree(fewer, comparative)\n");
rc &= test_sentence ("I run more often than Ben.",
"_subj(run, I)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, more)\n");
rc &= test_sentence ("I run less often than Ben.",
"_subj(run, I)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, less)\n");
rc &= test_sentence ("I run more often than Ben does.",
"_subj(run, I)\n"+
"_subj(do, Ben)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, more)\n");
rc &= test_sentence ("I run less often than Ben does.",
"_subj(run, I)\n"+
"_subj(do, Ben)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, less)\n");
rc &= test_sentence ("I run more often than Ben climbs.",
"_subj(run, I)\n"+
"_subj(climb, Ben)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(run, often)\n"+
"_advmod(often, more)\n");
rc &= test_sentence ("I run less often than Ben climbs.",
"_subj(run, I)\n"+
"_subj(climb, Ben)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(run, often)\n"+
"_advmod(often, less)\n");
rc &= test_sentence ("I run more races than Ben wins contests.",
"_subj(run, I)\n"+
"_obj(run, race)\n"+
"_subj(win, Ben)\n"+
"_obj(win, contest)\n"+
"_quantity(race, more)\n"+
"_comparative(race, run)\n"+
"than(I, Ben)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I run fewer races than Ben wins contests.",
"_subj(run, I)\n"+
"_obj(run, race)\n"+
"_subj(win, Ben)\n"+
"_obj(win, contest)\n"+
"_quantity(race, fewer)\n"+
"_comparative(race, run)\n"+
"than(I, Ben)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("I have more chairs than Ben.",
"_obj(have, chair)\n"+
"_subj(have, I)\n"+
"than(I, Ben)\n"+
"_comparative(chair, have)\n"+
"_quantity(chair, more)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I have fewer chairs than Ben.",
"_obj(have, chair)\n"+
"_subj(have, I)\n"+
"than(I, Ben)\n"+
"_comparative(chair, have)\n"+
"_quantity(chair, fewer)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("He earns much more money than I do.",
"_obj(earn, money)\n"+
"_subj(do, I)\n"+
"_subj(earn, he)\n"+
"than(he, I)\n"+
"_comparative(money, earn)\n"+
"_quantity(money, more)\n"+
"_advmod(more, much)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He earns much less money than I do.",
"_obj(earn, money)\n"+
"_subj(do, I)\n"+
"_subj(earn, he)\n"+
"than(he, I)\n"+
"_comparative(money, earn)\n"+
"_quantity(money, less)\n"+
"_advmod(less, much)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("She comes here more often than her husband.",
"_advmod(come, here)\n"+
"_advmod(often, more)\n"+
"_advmod(come, often)\n"+
"_subj(come, she)\n"+
"_poss(husband, her)\n"+
"_comparative(often, come)\n"+
"than(she, husband)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("She comes here less often than her husband.",
"_advmod(come, here)\n"+
"_advmod(often, less)\n"+
"_advmod(come, often)\n"+
"_subj(come, she)\n"+
"_poss(husband, her)\n"+
"_comparative(often, come)\n"+
"than(she, husband)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("Russian grammar is more difficult than English grammar.",
"_comparative(difficult, grammar)\n"+
"than(grammar, grammar)\n"+
"_amod(grammar, Russian)\n"+ // When link-grammar uses an A link, relex should produce _amod; the next link-grammar version will use A instead of AN here, so this will be updated then.
"_predadj(grammar, difficult)\n"+
"_amod(grammar, English)\n"+
"_advmod(difficult, more)\n"+
"degree(difficult, comparative)\n");
rc &= test_sentence ("Russian grammar is less difficult than English grammar.",
"_comparative(difficult, grammar)\n"+
"than(grammar, grammar)\n"+
"_amod(grammar, Russian)\n"+
"_predadj(grammar, difficult)\n"+
"_amod(grammar, English)\n"+
"_advmod(difficult, less)\n"+
"degree(difficult, comparative)\n");
rc &= test_sentence ("My sister is much more intelligent than me.",
"_amod(much, intelligent)\n"+
"_predadj(sister, intelligent)\n"+
"_poss(sister, me)\n"+
"than(sister, me)\n"+
"_comparative(intelligent, sister)\n"+
"_advmod(intelligent, more)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("My sister is much less intelligent than me.",
"_amod(much, intelligent)\n"+
"_predadj(sister, intelligent)\n"+
"_poss(sister, me)\n"+
"than(sister, me)\n"+
"_comparative(intelligent, sister)\n"+
"_advmod(intelligent, less)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("I find maths lessons more enjoyable than science lessons.",
"_iobj(find, maths)\n"+
"_obj(find, lesson)\n"+
"_subj(find, I)\n"+
"_amod(lesson, enjoyable)\n"+
"_nn(lesson, science)\n"+
"than(maths, science)\n"+
"_comparative(enjoyable, maths)\n"+
"_advmod(enjoyable, more)\n"+
"degree(enjoyable, comparative)\n");
rc &= test_sentence ("I find maths lessons less enjoyable than science lessons.",
"_iobj(find, maths)\n"+
"_obj(find, lesson)\n"+
"_subj(find, I)\n"+
"_amod(lesson, enjoyable)\n"+
"_nn(lesson, science)\n"+
"than(maths, science)\n"+
"_comparative(enjoyable, maths)\n"+
"_advmod(enjoyable, less)\n"+
"degree(enjoyable, comparative)\n");
// Comparatives without more/less terms
rc &= test_sentence ("Her great-grandson is nicer than her great-granddaughter.",
"than(great-grandson, great-granddaughter)\n"+
"_predadj(great-grandson, nice)\n"+
"_poss(great-grandson, her)\n"+
"_poss(great-granddaughter, her)\n"+
"_comparative(nice, great-grandson)\n"+
"degree(nice, comparative)\n");
rc &= test_sentence ("George is cleverer than Norman.",
"than(George, Norman)\n"+
"_predadj(George, clever)\n"+
"_comparative(clever, George)\n"+
"degree(clever, comparative)\n");
rc &= test_sentence ("Kim is taller than Linda.",
"than(Kim, Linda)\n"+
"_predadj(Kim, tall)\n"+
"_comparative(tall, Kim)\n"+
"degree(tall, comparative)\n");
rc &= test_sentence ("Venus is brighter than Mars.",
"than(Venus, Mars)\n"+
"_predadj(Venus, bright)\n"+
"_comparative(bright, Venus)\n"+
"degree(bright, comparative)\n");
rc &= test_sentence ("Mary is shorter than Jane.",
"than(Mary, Jane)\n"+
"_predadj(Mary, short)\n"+
"_comparative(short, Mary)\n"+
"degree(short, comparative)\n");
rc &= test_sentence ("I am happier than you.",
"than(I, you)\n"+
"_predadj(I, happy)\n"+
"_comparative(happy, I)\n"+
"degree(happy, comparative)");
rc &= test_sentence ("His house is bigger than hers.",
"than(house, hers)\n"+
"_predadj(house, big)\n"+
"_poss(house, him)\n"+
"_comparative(big ,house)\n"+
"degree(big, comparative)");
rc &= test_sentence ("She is two years older than me.",
"_obj(is, year)\n"+
"_amod(years, old)\n"+
"_quantity(year, two)\n"+
"numeric-FLAG(two, T)\n" +
"than(she, me)\n"+
"_comparative(old, she)\n"+
"degree(old, comparative)");
rc &= test_sentence ("New York is much bigger than Boston.",
"_subj(is, New_York)\n"+
"_amod(much, big)\n"+
"than(New_York, Boston)\n"+
"_comparative(big, New_York)\n"+
"degree(big, comparative)");
rc &= test_sentence ("He is a better player than Ronaldo.",
"_obj(be, player)\n"+
"_subj(be, he)\n"+
"_amod(player, good)\n"+
"than(he, Ronaldo)\n"+
"_comparative(good, he)\n"+
"degree(good, comparative)");
rc &= test_sentence ("France is a bigger country than Britain.",
"_obj(is, country)\n"+
"_subj(is, France)\n"+
"_amod(country, big)\n"+
"than(France, Britain)\n"+
"_comparative(big, France)\n"+
"degree(big, comparative)\n");
rc &= test_sentence ("That joke was funnier than his joke.",
"_predadj(joke, funny)\n"+
"than(joke, joke)\n"+
"_det(joke, that)\n"+
"_poss(joke, him)\n"+
"_comparative(funny, joke)\n"+
"degree(funny, comparative)");
rc &= test_sentence ("Our car is bigger than your car.",
"than(car, car)\n"+
"_predadj(car, big)\n"+
"_poss(car, us)\n"+
"_det(car, you)\n"+
"_poss(car, you)\n"+
"_comparative(big, car)\n"+
"degree(big, comparative)");
// Sentences that still need to be checked
rc &= test_sentence ("This computer is better than that one.",
"than(computer, one)\n"+
"_det(computer, this)\n"+
"_predadj(computer, good)\n"+
"_det(one, that)\n"+
"degree(good, comparative)\n"+
"_comparative(good, computer)\n");
rc &= test_sentence ("He's simpler than I thought.",
"than(he, I)\n"+
"_subj(think, I)\n"+
"_comparative(simple, he)\n"+
"_predadj(he, simple)\n"+
"degree(simple, comparative)\n");
rc &= test_sentence ("She's stronger at chess than I am.",
"at(strong, chess)\n"+
"than(she, I)\n"+
"_predadj(she, strong)\n"+
"degree(strong, comparative)\n"+
"_comparative(strong, she)\n");
rc &= test_sentence ("She's prettier than her mother.",
"_predadj(she, pretty)\n"+
"than(she, mother)\n"+
"_poss(mother, her)\n"+
"_comparative(pretty, she)\n"+
"degree(pretty, comparative)\n");
rc &= test_sentence ("This exam was more difficult than the other.",
"than(exam, other)\n"+
"_det(exam, this)\n"+
"_predadj(exam, difficult)\n"+
"_advmod(difficult, more)\n"+
"_comparative(difficult, exam)\n"+
"degree(difficult, comparative)\n");
rc &= test_sentence ("It's much colder today than it was yesterday.",
"_subj(be, it)\n"+
"than(today, yesterday)\n"+
"_advmod(cold, today)\n"+
"_advmod(cold, yesterday)\n"+
"_predadj(it, cold)\n"+
"_comparative(cold, it)\n"+
"degree(cold, comparative)\n");
rc &= test_sentence ("This grammar topic is easier than most others.",
"than(topic, others)\n"+
"_det(topic, this)\n"+
"_nn(topic, grammar)\n"+
"_predadj(topic, easy)\n"+
"_quantity(others, most)\n"+
"_comparative(easy, topic)\n"+
"degree(easy, comparative)\n");
rc &= test_sentence ("I find science more difficult than mathematics.",
"_obj(find, science)\n"+
"_subj(find, I)\n"+
"_advmod(difficult, more)\n"+
"than(science, mathematics)\n"+
"_comparative(difficult, science)\n"+
"degree(difficult, comparative)\n");
// One entity, two or more features
rc &= test_sentence ("He is more intelligent than attractive.",
"than(intelligent, attractive)\n"+
"_predadj(he, intelligent)\n"+
"_advmod(intelligent, more)\n"+
"_comparative(intelligent, he)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("He is less intelligent than attractive.",
"than(intelligent, attractive)\n"+
"_predadj(he, intelligent)\n"+
"_advmod(intelligent, less)\n"+
"_comparative(intelligent, he)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("The dog was more hungry than angry.",
"_predadj(dog, hungry)\n"+
"than(hungry, angry)\n"+
"_advmod(hungry, more)\n"+
"_comparative(hungry, dog)\n"+
"degree(hungry, comparative)\n");
rc &= test_sentence ("The dog was less hungry than angry.",
"_predadj(dog, hungry)\n"+
"than(hungry, angry)\n"+
"_advmod(hungry, less)\n"+
"_comparative(hungry, dog)\n"+
"degree(hungry, comparative)\n");
rc &= test_sentence ("He did it more quickly than carefully.",
"_obj(do, it)\n"+
"_subj(do, he)\n"+
"than(quickly, carefully)\n"+
"_advmod(do, quickly)\n"+
"_advmod(quickly, more)\n"+
"_comparative(quickly, do)\n"+
"degree(quickly, comparative)\n");
rc &= test_sentence ("He did it less quickly than carefully.",
"_obj(do, it)\n"+
"_subj(do, he)\n"+
"than(quickly, carefully)\n"+
"_advmod(do, quickly)\n"+
"_advmod(quickly, less)\n"+
"_comparative(quickly, do)\n"+
"degree(quickly, comparative)\n");
rc &= test_sentence ("He has more money than time.",
"_obj(have, money)\n"+
"_subj(have, he)\n"+
"than(money, time)\n"+
"_quantity(money, more)\n"+
"_comparative(money, have)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He has less money than time.",
"_obj(have, money)\n"+
"_subj(have, he)\n"+
"than(money, time)\n"+
"_quantity(money, less)\n"+
"_comparative(money, have)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("He plays more for money than for pleasure.",
"_subj(play, he)\n"+
"_obj(play, more)\n"+
"for(play, money)\n"+
"for(than, pleasure)\n"+
"than(money, pleasure)\n"+
"_comparative(more, play)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He plays less for money than for pleasure.",
"_subj(play, he)\n"+
"_obj(play, less)\n"+
"for(play, money)\n"+
"for(than, pleasure)\n"+
"than(money, pleasure)\n"+
"_comparative(less, play)\n"+
"degree(less, comparative)\n");
// Two entities, two features
rc &= test_sentence ("Jack is more ingenious than Ben is crazy.",
"_predadj(Jack, ingenious)\n"+
"_predadj(Ben, crazy)\n"+
"_advmod(ingenious, more)\n"+
"_comparative(ingenious, Jack)\n"+
"than(Jack, Ben)\n"+
"than1(ingenious, crazy)\n"+
"degree(ingenious, comparative)\n");
rc &= test_sentence ("Jack is less ingenious than Ben is crazy.",
"_predadj(Jack, ingenious)\n"+
"_predadj(Ben, crazy)\n"+
"_advmod(ingenious, less)\n"+
"_comparative(ingenious, Jack)\n"+
"than(Jack, Ben)\n"+
"than1(ingenious, crazy)\n"+
"degree(ingenious, comparative)\n");
// Two entities, two features, without more/less
rc &= test_sentence ("I slept longer than he worked",
"_subj(sleep, I)\n"+
"_subj(work, he)\n"+
"_advmod(sleep, long)\n"+
"than(I, he)\n"+
"than1(sleep, work)\n"+
"_comparative(long, sleep)\n"+
"degree(long, comparative)\n");
report(rc, "Comparatives");
return rc;
}
public boolean test_equatives()
{
boolean rc = true;
// Equative: two entities, one feature
rc &= test_sentence ("Amen's hair is as long as Ben's.",
"_poss(hair, Amen)\n"+
"_predadj(hair, long)\n"+
"as(long, Ben)\n"+
"than(Amen, Ben)\n");
rc &= test_sentence ("Amen’s hair is same as Ben’s.",
"_poss(hair, Amen)\n"+
"_predadj(hair, same)\n"+
"as(same, Ben)\n"+
"than(Amen, Ben)\n");
rc &= test_sentence ("Jack’s hair color is similar to that of Ben’s.",
"_poss(color, Jack)\n"+
"_nn(color, hair)\n"+
"_predadj(color, similar)\n"+
"of(that, Ben)\n"+
"to(similar, that)\n"+
"than(Jack, Ben)\n");
rc &= test_sentence ("Jack’s hair color is similar to Ben's",
"_poss(color, Jack)\n"+
"_nn(color, hair)\n"+
"_predadj(color, similar)\n"+
"to(similar, Ben)\n"+
"than(Jack, Ben)\n");
rc &= test_sentence ("Jack is as intelligent as Ben.",
"_predadj(Jack, intelligent)\n"+
"as(intelligent, Ben)\n"+
"than(Jack, Ben)\n");
rc &= test_sentence ("The book’s color is same as that of the pen’s.",
"_poss(color, book)\n"+
"_predadj(color, same)\n"+
"of(that, pen)\n"+
"as(same, that)\n"+
"than(book, pen)\n");
rc &= test_sentence ("The snail is running exactly as fast as the cheetah.",
"_predadj(snail, run)\n"+
"as(run, cheetah)\n"+
"_advmod(fast, exactly)\n"+
"than(snail, cheetah)\n");
// One entity, one feature, through time
rc &= test_sentence ("The coffee tastes the same as it did last year.",
"_subj(taste, coffee)\n"+
"_obj(taste, same)\n"+
"_obj(do, year)\n"+
"_subj(do, it)\n"+
"as(taste, do)\n"+
"_amod(year, last)\n");
rc &= test_sentence ("The coffee tastes as it did last year.",
"_subj(taste, coffee)\n"+
"_obj(do, year)\n"+
"_subj(do, it)\n"+
"as(taste, do)\n"+
"_amod(year, last)\n");
rc &= test_sentence ("Mike runs as fast as he did last year.",
"_subj(do, he)\n"+
"_subj(run, Mike)\n"+
"as(fast, he)\n"+
"_advmod(run, fast)\n"+
"_advmod(do, year)\n"+
"_amod(year, last)\n"+
"than(Mike, he)\n");
rc &= test_sentence ("The kick was as soft as the first.",
"_predadj(kick, soft)\n"+
"as(kick, first)\n");
rc &= test_sentence ("He is as smart as I ever expected him to be.",
"_predadj(he, smart)\n"+
"_subj(expect, I)\n"+
"_obj(expect, him)\n"+
"as(smart, expect)\n"+
"_advmod(expect, ever)\n"+
"_to-do(smart, be)\n");
report(rc, "Equatives");
return rc;
}
public boolean test_conjunctions()
{
boolean rc = true;
// conjoined verbs
rc &= test_sentence ("Scientists make observations and ask questions.",
"_obj(make, observation)\n" +
"_obj(ask, question)\n" +
"_subj(make, scientist)\n" +
"_subj(ask, scientist)\n" +
"conj_and(make, ask)\n");
// conjoined nouns
rc &= test_sentence ("She is a student and an employee.",
"_obj(be, student)\n" +
"_obj(be, employee)\n" +
"_subj(be, she)\n" +
"conj_and(student, employee)\n");
// conjoined adjectives
rc &= test_sentence ("I hailed a black and white taxi.",
"_obj(hail, taxi)\n" +
"_subj(hail, I)\n" +
"_amod(taxi, black)\n" +
"_amod(taxi, white)\n" +
"conj_and(black, white)\n");
// conjoined adverbs
rc &= test_sentence ("She ran quickly and quietly.",
"_advmod(run, quickly)\n" +
"_advmod(run, quietly)\n" +
"_subj(run, she)\n" +
"conj_and(quickly, quietly)\n");
// adjectival modifiers on conjoined subject
rc &= test_sentence ("The big truck and the little car collided.",
"_amod(car, little)\n" +
"_amod(truck, big)\n" +
"_subj(collide, truck)\n" +
"_subj(collide, car)\n" +
"conj_and(truck, car)\n");
// verbs with modifiers
rc &= test_sentence ("We ate dinner at home and went to the movies.",
"_obj(eat, dinner)\n" +
"conj_and(eat, go)\n" +
"at(eat, home)\n" +
"_subj(eat, we)\n" +
"to(go, movie)\n" +
"_subj(go, we)\n");
// verb with more modifiers
rc &= test_sentence ("We ate a late dinner at home and went out to the movies afterwards.",
"_obj(eat, dinner)\n" +
"conj_and(eat, go_out)\n" +
"at(eat, home)\n" +
"_subj(eat, we)\n" +
"to(go_out, movie)\n" +
"_advmod(go_out, afterwards)\n" +
"_subj(go_out, we)\n" +
"_amod(dinner, late)\n");
// conjoined ditransitive verbs
rc &= test_sentence ("She baked him a cake and sang him a song.",
"_iobj(sing, him)\n" +
"_obj(sing, song)\n" +
"_subj(sing, she)\n" +
"_iobj(bake, him)\n" +
"_obj(bake, cake)\n" +
"conj_and(bake, sing)\n" +
"_subj(bake, she)\n");
// conjoined adverbs with modifiers
rc &= test_sentence ("she ran very quickly and extremely quietly.",
"_advmod(run, quickly)\n" +
"_advmod(run, quietly)\n" +
"_subj(run, she)\n" +
"_advmod(quietly, extremely)\n" +
"conj_and(quickly, quietly)\n" +
"_advmod(quickly, very)\n");
// conjoined adverbs without modifiers
rc &= test_sentence ("She handled it quickly and gracefully.",
"_obj(handle, quickly)\n" +
"_obj(handle, gracefully)\n" +
"_advmod(handle, quickly)\n" +
"_advmod(handle, gracefully)\n" +
"_subj(handle, she)\n" +
"conj_and(quickly, gracefully)\n");
// modifiers on conjoined adjectives
rc &= test_sentence ("He had very long and very white hair.",
"_obj(have, hair)\n" +
"_subj(have, he)\n" +
"_amod(hair, long)\n" +
"_amod(hair, white)\n" +
"_advmod(white, very)\n" +
"conj_and(long, white)\n" +
"_advmod(long, very)\n");
// adjectival modifiers on conjoined object
rc &= test_sentence ("The collision was between the little car and the big truck.",
"_pobj(between, car)\n" +
"_pobj(between, truck)\n" +
"_psubj(between, collision)\n" +
"_amod(truck, big)\n" +
"_amod(car, little)\n" +
"conj_and(car, truck)\n");
// conjoined names with modifiers
rc &= test_sentence ("Big Tom and Angry Sue went to the movies.",
"to(go, movie)\n" +
"_subj(go, Big_Tom)\n" +
"_subj(go, Angry_Sue)\n" +
"conj_and(Big_Tom, Angry_Sue)\n");
// correlative conjunction
rc &= test_sentence ("I could use neither the lorry nor the van.",
"_to-do(could, use)\n"+
"_quantity(lorry, neither)\n"+
"conj_neither_nor(lorry, van)\n"+
"_obj(use, lorry)\n"+
"_obj(use, van)\n"+
"_subj(use, I)\n");
report(rc, "Conjunction");
return rc;
}
public boolean test_extraposition()
{
boolean rc = true;
rc &= test_sentence ("The woman who lives next door is a registered nurse.",
"_obj(be, nurse)\n" +
"_subj(be, woman)\n" +
"_amod(nurse, registered)\n" +
"_advmod(live, next_door)\n" +
"_subj(live, woman)\n" +
"who(woman, live)\n");
rc &= test_sentence ("A player who is injured has to leave the field.",
"_to-do(have, leave)\n" +
"_subj(have, player)\n" +
"_obj(leave, field)\n" +
"_predadj(player, injured)\n" +
"who(player, injured)\n" );
rc &= test_sentence ("Pizza, which most people love, is not very healthy.",
"_advmod(very, not)\n" +
"_advmod(healthy, very)\n" +
"_obj(love, Pizza)\n" +
"_quantity(people, most)\n" +
"which(Pizza, love)\n" +
"_subj(love, people)\n" +
"_predadj(Pizza, healthy)\n" );
rc &= test_sentence ("The restaurant which belongs to my aunt is very famous.",
"_advmod(famous, very)\n" +
"to(belong, aunt)\n" +
"_subj(belong, restaurant)\n" +
"_poss(aunt, me)\n" +
"which(restaurant, belong)\n" +
"_predadj(restaurant, famous)\n");
rc &= test_sentence ("The books which I read in the library were written by Charles Dickens.",
"_obj(write, book)\n" +
"by(write, Charles_Dickens)\n" +
"_obj(read, book)\n" +
"in(read, library)\n" +
"_subj(read, I)\n" +
"which(book, read)\n");
rc &= test_sentence("This is the book whose author I met in a library.",
"_obj(be, book)\n" +
"_subj(be, this)\n" +
"_obj(meet, author)\n" +
"in(meet, library)\n" +
"_subj(meet, I)\n" +
"whose(book, author)\n");
rc &= test_sentence("The book that Jack lent me is very boring.",
"_advmod(boring, very)\n" +
"_iobj(lend, book)\n" +
"_obj(lend, me)\n" +
"_subj(lend, Jack)\n" +
"that(book, lend)\n" +
"_predadj(book, boring)\n");
rc &= test_sentence("They ate a special curry which was recommended by the restaurant’s owner.",
"_obj(eat, curry)\n" +
"_subj(eat, they)\n" +
"_obj(recommend, curry)\n" +
"by(recommend, owner)\n" +
"_poss(owner, restaurant)\n" +
"which(curry, recommend)\n" +
"_amod(curry, special)\n");
rc &= test_sentence("The dog who Jack said chased me was black.",
"_obj(chase, me)\n" +
"_subj(chase, dog)\n" +
"_subj(say, Jack)\n" +
"_predadj(dog, black)\n" +
"who(dog, chase)\n");
rc &= test_sentence("Jack, who hosted the party, is my cousin.",
"_obj(be, cousin)\n" +
"_subj(be, Jack)\n" +
"_poss(cousin, me)\n" +
"_obj(host, party)\n" +
"_subj(host, Jack)\n" +
"who(Jack, host)\n");
rc &= test_sentence("Jack, whose name is in that book, is the student near the window.",
"near(be, window)\n" +
"_obj(be, student)\n" +
"_subj(be, Jack)\n" +
"_pobj(in, book)\n" +
"_psubj(in, name)\n" +
"_det(book, that)\n" +
"whose(Jack, name)\n");
rc &= test_sentence("Jack stopped the police car that was driving fast.",
"_obj(stop, car)\n" +
"_subj(stop, Jack)\n" +
"_advmod(drive, fast)\n" +
"_predadj(car, drive)\n" +
"that(car, drive)\n" +
"_nn(car, police)\n");
rc &= test_sentence("Just before the crossroads, the car was stopped by a traffic sign that stood on the street.",
"_obj(stop, car)\n" +
"by(stop, sign)\n" +
"_advmod(stop, just)\n" +
"on(stand, street)\n" +
"_subj(stand, sign)\n" +
"that(sign, stand)\n" +
"_nn(sign, traffic)\n" +
"before(just, crossroads)\n");
report(rc, "Extraposition");
return rc;
}
//Added by Matthew
public boolean test_inquisitives()
{
boolean rc = true;
rc &= test_sentence ("What is Socrates?",
"_obj(be, Socrates)\n" +
"_subj(be, _$qVar)\n");
rc &= test_sentence ("Who is the teacher?",
"_obj(be, teacher)\n" +
"_subj(be, _$qVar)\n");
rc &= test_sentence ("Who is a man?",
"_obj(be, man)\n" +
"_subj(be, _$qVar)\n");
rc &= test_sentence ("Who told you that bullshit?",
"_iobj(tell, you)\n" +
"_obj(tell, bullshit)\n" +
"_subj(tell, _$qVar)\n" +
"_det(bullshit, that)\n");
rc &= test_sentence ("Who told that story to the police?",
"to(tell, police)\n" +
"_obj(tell, story)\n" +
"_subj(tell, _$qVar)\n" +
"_det(story, that)\n");
rc &= test_sentence ("What gives you that idea?",
"_iobj(give, you)\n" +
"_obj(give, idea)\n" +
"_subj(give, _$qVar)\n" +
"_det(idea, that)\n");
rc &= test_sentence ("What gave that idea to the police?",
"to(give, police)\n" +
"_obj(give, idea)\n" +
"_subj(give, _$qVar)\n" +
"_det(idea, that)\n");
rc &= test_sentence ("What did you tell the fuzz?",
"_iobj(tell, fuzz)\n" +
"_obj(tell, _$qVar)\n" +
"_subj(tell, you)\n");
rc &= test_sentence ("What did you give to Mary?",
"to(give, Mary)\n" +
"_obj(give, _$qVar)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("Who did you give the slavers?",
"_subj(give, you)\n");
rc &= test_sentence ("Who did you sell to the slavers?",
"to(sell, to)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("To whom did you sell the children?",
"to(sell, _$qVar)\n" +
"_obj(sell, child)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("To what do we owe the pleasure?",
"to(owe, _$qVar)\n" +
"_obj(owe, pleasure)\n" +
"_subj(owe, we)\n");
rc &= test_sentence ("Who did you sell the children to?",
"to(sell, to)\n" +
"_obj(sell, child)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("What bothers you?",
"_obj(bother, you)\n" +
"_subj(bother, _$qVar)\n");
rc &= test_sentence ("Who programmed you?",
"_obj(program, you)\n" +
"_subj(program, _$qVar)\n");
rc &= test_sentence ("What is on the table?",
"_pobj(on, table)\n" +
"_psubj(on, _$qVar)\n");
rc &= test_sentence ("What did you say?",
"_obj(say, _$qVar)\n" +
"_subj(say, you)\n");
rc &= test_sentence ("Who do you love?",
"_obj(love, _$qVar)\n" +
"_subj(love, you)\n");
rc &= test_sentence ("What is for dinner?",
"_pobj(for, dinner)\n" +
"_psubj(for, _$qVar)\n");
rc &= test_sentence ("Who's on first?",
"_pobj(on, first)\n" +
"_psubj(on, _$qVar)\n");
rc &= test_sentence ("Who farted?",
"_subj(fart, _$qVar)\n");
rc &= test_sentence ("What is happening?",
"_subj(happen, _$qVar)\n");
rc &= test_sentence ("Who is correct?",
"_predadj(_$qVar, correct)\n");
rc &= test_sentence ("What is right?",
"_predadj(_$qVar, right)\n");
rc &= test_sentence ("What are you doing?",
"_subj(_$qVar, you)\n");
rc &= test_sentence ("Are you the one?",
"_obj(be, one)\n" +
"_subj(be, you)\n");
rc &= test_sentence ("Are you mad?",
"_predadj(you, mad)\n");
rc &= test_sentence ("Is the book under the table?",
"under(book, table)\n" +
"_subj(be, book)\n");
rc &= test_sentence ("Does he seem mad?",
"_to-be(seem, mad)\n" +
"_subj(seem, he)\n");
rc &= test_sentence ("Does she want to help us?",
"_obj(help, us)\n" +
"_to-do(want, help)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Does she want you to help us?",
"_obj(help, us)\n" +
"_subj(help, you)\n" +
"_to-do(want, help)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Was she good enough to help?",
"_predadj(she, good)\n" +
"_to-do(good, help)\n");
rc &= test_sentence ("Must she be able to sing?",
"_to-do(able, sing)\n" +
"_predadj(she, able)\n");
rc &= test_sentence ("Does she want to sing?",
"_to-do(want, sing)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Have you slept?",
"_subj(sleep, you)\n");
rc &= test_sentence ("Will you sleep?",
"_subj(sleep, you)\n");
rc &= test_sentence ("Did you sleep?",
"_subj(sleep, you)\n");
rc &= test_sentence ("Did you eat the leftover baba-ganoush?",
"_obj(eat, leftover)\n" +
"_to-be(eat, baba-ganoush)\n" +
"_subj(eat, you)\n");
rc &= test_sentence ("Did you give her the money?",
"_iobj(give, her)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("Did you give the money to her?",
"to(give, her)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("The book is under the table?",
"_pobj(under, table)\n" +
"_psubj(under, book)\n");
rc &= test_sentence ("Maybe she eats lunch.",
"_obj(eat, lunch)\n" +
"_advmod(eat, maybe)\n" +
"_subj(eat, she)\n");
rc &= test_sentence ("Perhaps she is nice.",
"_advmod(nice, perhaps)\n" +
"_predadj(she, nice)\n");
rc &= test_sentence ("She wants to help John.",
"_to-do(want, help)\n" +
"_subj(want, she)\n" +
"_obj(help, John)\n");
rc &= test_sentence ("She wants you to help us.",
"_to-do(want, help)\n" +
"_subj(want, she)\n" +
"_obj(help, us)\n" +
"_subj(help, you)\n");
rc &= test_sentence ("She is nice to help with the project.",
"with(help, project)\n" +
"_to-do(nice, help)\n" +
"_predadj(she, nice)\n");
rc &= test_sentence ("She must be able to sing.",
"_to-do(able, sing)\n" +
"_predadj(she, able)\n");
rc &= test_sentence ("She must need to sing?",
"_to-do(need, sing)\n" +
"_subj(need, she)\n");
rc &= test_sentence ("She must want to sing?",
"_to-do(want, sing)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("She wants to sing.",
"_to-do(want, sing)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Where do you live?",
"_%atLocation(live, _$qVar)\n" +
"_subj(live, you)\n");
rc &= test_sentence ("Where did you eat dinner?",
"_%atLocation(eat, _$qVar)\n" +
"_obj(eat, dinner)\n" +
"_subj(eat, you)\n");
rc &= test_sentence ("Where is the party?",
"_%atLocation(_%copula, _$qVar)\n" +
"_subj(_%copula, party)\n");
rc &= test_sentence ("Where will she be happy?",
"_%atLocation(happy, _$qVar)\n" +
"_predadj(she, happy)\n");
rc &= test_sentence ("When did jazz die?",
"_%atTime(die, _$qVar)\n" +
"_subj(die, jazz)\n");
rc &= test_sentence ("When did you bake the cake?",
"_%atTime(bake, _$qVar)\n" +
"_obj(bake, cake)\n" +
"_subj(bake, you)\n");
rc &= test_sentence ("When did you give him the money?",
"_iobj(give, him)\n" +
"_%atTime(give, _$qVar)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("When is the party?",
"_%atTime(_%copula, _$qVar)\n" +
"_subj(_%copula, party)\n");
rc &= test_sentence ("Why do you live?",
"_subj(live, you)\n");
rc &= test_sentence ("Why do you like terrible music?",
"_obj(like, music)\n" +
"_subj(like, you)\n" +
"_amod(music, terrible)\n");
rc &= test_sentence ("Why are you such a fool?",
"_obj(be, fool)\n" +
"_subj(be, you)\n");
rc &= test_sentence ("How did you sleep?",
"how(sleep, _$qVar)\n" +
"_subj(sleep, you)\n");
rc &= test_sentence ("How was the party?",
"how(_%copula, _$qVar)\n" +
"_subj(_%copula, party)\n");
rc &= test_sentence ("How is your food?",
"_poss(food, you)\n" +
"how(_%copula, _$qVar)\n" +
"_subj(_%copula, food)\n");
rc &= test_sentence ("How much money does it cost?",
"_obj(cost, money)\n" +
"_subj(cost, it)\n" +
"_quantity(money, _$qVar)\n");
rc &= test_sentence ("How many books have you read?",
"_obj(read, book)\n" +
"_subj(read, you)\n" +
"_quantity(book, _$qVar)\n");
rc &= test_sentence ("How fast does it go?",
"_advmod(fast, _$qVar)\n" +
"_subj(go, it)\n");
rc &= test_sentence ("Which girl do you like?",
"_obj(like, girl)\n" +
"_subj(like, you)\n" +
"_quantity(girl, _$qVar)\n");
rc &= test_sentence ("Which girl likes you?",
"_obj(like, you)\n" +
"_subj(like, girl)\n" +
"_quantity(girl, _$qVar)\n");
rc &= test_sentence ("Which girl is crazy?",
"_quantity(girl, _$qVar)\n" +
"_predadj(girl, crazy)\n");
rc &= test_sentence ("The books were written by Charles Dickens.",
"_obj(write, book)\n" +
"by(write, Charles_Dickens)\n");
rc &= test_sentence ("The books are published.",
"_obj(publish, book)\n");
rc &= test_sentence ("I did my homework, and I went to school.",
"_obj(do, homework)\n" +
"_subj(do, I)\n" +
"to(go, school)\n" +
"_subj(go, I)\n" +
"_poss(homework, me)\n");
rc &= test_sentence ("John and Madison eat the cake.",
"_obj(eat, cake)\n" +
"_subj(eat, John)\n" +
"_subj(eat, Madison)\n" +
"conj_and(John, Madison)\n");
rc &= test_sentence ("Joan is poor but happy.",
"_predadj(Joan, poor)\n" +
"_predadj(Joan, happy)\n");
rc &= test_sentence ("I think that dogs can fly.",
"that(think, fly)\n" +
"_subj(think, I)\n" +
"_subj(fly, dog)\n");
rc &= test_sentence ("He is glad that she won.",
"that(glad, win)\n" +
"_subj(win, she)\n" +
"_predadj(he, glad)\n");
rc &= test_sentence ("He ran so quickly that he flew.",
"_advmod(quickly, so)\n" +
"_subj(fly, he)\n" +
"that(run, fly)\n" +
"_advmod(run, quickly)\n" +
"_subj(run, he)\n");
rc &= test_sentence ("I had dinner at 6 pm",
"_obj(have, dinner)\n" +
"at(have, pm)\n" +
"_subj(have, I)\n" +
"_time(pm, 6)\n");
rc &= test_sentence ("I went to sleep at 1 am",
"to(go, sleep)\n" +
"_subj(go, I)\n" +
"_time(am, 1)\n" +
"at(sleep, am)\n");
rc &= test_sentence ("Who farted?",
"_subj(fart, _$qVar)\n");
rc &= test_sentence ("What happened?",
"_subj(happen, _$qVar)\n");
rc &= test_sentence ("What killed him?",
"_obj(kill, him)\n" +
"_subj(kill, _$qVar)\n");
rc &= test_sentence ("Who ate the pizza?",
"_obj(eat, pizza)\n" +
"_subj(eat, _$qVar)\n");
rc &= test_sentence ("What gave you that idea?",
"_iobj(give, you)\n" +
"_obj(give, idea)\n" +
"_subj(give, _$qVar)\n" +
"_det(idea, that)\n");
rc &= test_sentence ("Who told you that?",
"_iobj(tell, you)\n" +
"_obj(tell, that)\n" +
"_subj(tell, _$qVar)\n");
rc &= test_sentence ("What is for dinner?",
"_pobj(for, dinner)\n" +
"_psubj(for, _$qVar)\n");
rc &= test_sentence ("Who's on first?",
"_pobj(on, first)\n" +
"_psubj(on, _$qVar)\n");
rc &= test_sentence ("Who are you?",
"_subj(_%copula, you)\n");
rc &= test_sentence ("Who do you love?",
"_obj(love, _$qVar)\n" +
"_subj(love, you)\n");
rc &= test_sentence ("What do you think?",
"_obj(think, _$qVar)\n" +
"_subj(think, you)\n");
rc &= test_sentence ("To whom did you sell the children?",
"to(sell, _$qVar)\n" +
"_obj(sell, child)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("Why did you give him the money?",
"_iobj(give, him)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("Why are you so stupid?",
"_advmod(stupid, so)\n" +
"_predadj(you, stupid)\n");
rc &= test_sentence ("How did you like the movie?",
"_obj(like, movie)\n" +
"how(like, _$qVar)\n" +
"_subj(like, you)\n");
rc &= test_sentence ("How did you send him the message?",
"_iobj(send, him)\n" +
"_obj(send, message)\n" +
"how(send, _$qVar)\n" +
"_subj(send, you)\n");
report(rc, "Inquisitives");
return rc;
}
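// Command-line entry point: builds the shared RelationExtractor, runs every
// test group, and prints overall totals plus any sentences that failed.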
public static void main(String[] args)
{
setUpClass();
TestRelEx ts = new TestRelEx();
ts.runTests();
}
// @Test
public void runTests() {
TestRelEx ts = this;
boolean rc = true;
rc &= ts.test_determiners();
rc &= ts.test_time();
rc &= ts.test_comparatives();
rc &= ts.test_equatives();
rc &= ts.test_extraposition();
rc &= ts.test_conjunctions();
rc &= ts.test_inquisitives();
if (rc) {
System.err.println("Tested " + ts.pass + " sentences, test passed OK");
} else {
System.err.println("Test failed\n\t" +
ts.fail + " sentences failed\n\t" +
ts.pass + " sentences passed");
}
System.err.println("******************************");
System.err.println("Failed test sentences on Relex");
System.err.println("******************************");
if (sentfail.isEmpty())
System.err.println("All test sentences passed");
for(String temp : sentfail){
System.err.println(temp);
}
System.err.println("******************************\n");
}
}
|
src/java_test/relex/test/TestRelEx.java
|
/*
* Copyright 2009 Linas Vepstas
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package relex.test;
import java.util.ArrayList;
import java.util.Collections;
import org.junit.BeforeClass;
import org.junit.Test;
import relex.ParsedSentence;
import relex.RelationExtractor;
import relex.Sentence;
import relex.output.SimpleView;
public class TestRelEx
{
private static RelationExtractor re;
private int pass;
private int fail;
private int subpass;
private int subfail;
private static ArrayList<String> sentfail= new ArrayList<String>();
@BeforeClass
public static void setUpClass() {
re = new RelationExtractor();
}
public TestRelEx()
{
pass = 0;
fail = 0;
subpass = 0;
subfail = 0;
}
public ArrayList<String> split(String a)
{
String[] sa = a.split("\n");
ArrayList<String> saa = new ArrayList<String>();
for (String s : sa) {
saa.add(s);
}
Collections.sort (saa);
return saa;
}
/**
* First argument is the sentence.
* Second argument is a list of the relations that RelEx
* should be generating.
* Return true if RelEx generates the same dependencies
* as the second argument.
*/
public boolean test_sentence (String sent, String sf)
{
re.do_penn_tagging = false;
re.setMaxParses(1);
Sentence sntc = re.processSentence(sent);
ParsedSentence parse = sntc.getParses().get(0);
String rs = SimpleView.printBinaryRelations(parse);
String urs = SimpleView.printUnaryRelations(parse);
ArrayList<String> exp = split(sf);
ArrayList<String> brgot = split(rs);
ArrayList<String> urgot = split(urs);
// Initialize the count of relations produced by the parser with the number
// of binary relations; matched unary relations are added to it below.
int sizeOfGotRelations = brgot.size();
// Check the expected binary and unary relations.
// The loop below verifies that every expected relation is contained in the
// parser's binary-relation output list "brgot"; any expected relation not
// found there is looked up in the parser's unary-relation output list "urgot".
for (int i=0; i< exp.size(); i++)
{
if(!brgot.contains(exp.get(i)))
{
if(!urgot.contains(exp.get(i)))
{
System.err.println("Error: content miscompare:\n" +
"\tExpected = " + exp + "\n" +
"\tGot Binary Relations = " + brgot + "\n" +
"\tGot Unary Relations = " + urgot + "\n" +
"\tSentence = " + sent);
subfail ++;
fail ++;
sentfail.add(sent);
return false;
}
// The expected relation was found among the unary relations, so count it
// toward the total number of relations produced by the parser.
sizeOfGotRelations++;
}
}
// The size comparison between expected and produced relations is done here,
// after the loop, so that any unary relations in the expected output have
// already been counted (see the loop above). It only detects the case where
// the parser produced more relations (binary + unary) than were expected;
// the case where the parser produced fewer relations than expected is
// already caught by the loop above.
if (exp.size() < sizeOfGotRelations)
{
System.err.println("Error: size miscompare:\n" +
"\tExpected = " + exp + "\n" +
"\tGot Binary Relations = " + brgot + "\n" +
"\tGot Unary Relations = " + urgot + "\n" +
"\tSentence = " + sent);
subfail ++;
fail ++;
sentfail.add(sent);
return false;
}
subpass ++;
pass ++;
return true;
}
public void report(boolean rc, String subsys)
{
if (rc) {
System.err.println(subsys + ": Tested " + pass +
" sentences, test passed OK");
} else {
System.err.println(subsys + ": Test failed\n\t" +
fail + " sentences failed\n\t" +
pass + " sentences passed");
}
subpass = 0;
subfail = 0;
}
public boolean test_determiners()
{
boolean rc = true;
rc &= test_sentence ("Ben ate my cookie.",
"_subj(eat, Ben)\n" +
"_obj(eat, cookie)\n" +
"_poss(cookie, me)\n");
rc &= test_sentence ("Ben ate that cookie.",
"_subj(eat, Ben)\n" +
"_obj(eat, cookie)\n" +
"_det(cookie, that)\n");
rc &= test_sentence ("All my writings are bad.",
"_predet(writings, all)\n" +
"_poss(writings, me)\n" +
"_predadj(writings, bad)\n");
rc &= test_sentence ("All his designs are bad.",
"_predet(design, all)\n" +
"_poss(design, him)\n" +
"_predadj(design, bad)\n");
rc &= test_sentence ("All the boys knew it.",
"_subj(know, boy)\n" +
"_obj(know, it)\n" +
"_predet(boy, all)\n");
rc &= test_sentence ("Joan thanked Susan for all the help she had given.",
"for(thank, help)\n" +
"_subj(thank, Joan)\n" +
"_obj(thank, Susan)\n" +
"_predet(help, all)\n" +
"_subj(give, she)\n" +
"_obj(give, help)\n");
report(rc, "Determiners");
return rc;
}
public boolean test_time()
{
boolean rc = true;
rc &= test_sentence("I had breakfast at 8 am.",
"_obj(have, breakfast)\n"+
"at(have, am)\n" +
"_subj(have, I)\n" +
"_time(am, 8)\n");
rc &= test_sentence("I had supper before 6 pm.",
"_obj(have, supper)\n" +
"before(have, pm)\n" +
"_subj(have, I)\n" +
"_time(pm, 6)\n");
report(rc, "Time");
return rc;
}
public boolean test_comparatives()
{
boolean rc = true;
rc &= test_sentence ("Some people like pigs less than dogs.",
"_advmod(like, less)\n" +
"_obj(like, pig)\n" +
"_quantity(people, some)\n" +
"_subj(like, people)\n" +
"than(pig, dog)\n");
rc &= test_sentence ("Some people like pigs more than dogs.",
"_advmod(like, more)\n" +
"_obj(like, pig)\n" +
"_quantity(people, some)\n" +
"_subj(like, people)\n" +
"than(pig, dog)\n");
// Non-equal gradable: two entities, one feature ("more/less")
rc &= test_sentence ("He is more intelligent than John.",
"than(he, John)\n" +
"_comparative(intelligent, he)\n" +
"degree(intelligent, comparative)\n"+
"_advmod(intelligent, more)\n"+
"_predadj(he, intelligent)\n");
rc &= test_sentence ("He is less intelligent than John.",
"than(he, John)\n" +
"_comparative(intelligent, he)\n" +
"degree(intelligent, comparative)\n"+
"_advmod(intelligent, less)\n"+
"_predadj(he, intelligent)\n");
rc &= test_sentence ("He runs more quickly than John.",
"_advmod(run, quickly)\n"+
"_advmod(quickly, more)\n"+
"_subj(run, he)\n" +
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs less quickly than John.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_advmod(quickly, less)\n"+
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs more quickly than John does.",
"_advmod(run, quickly)\n" +
"_advmod(quickly, more)\n"+
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
// This sentence is ungrammatical but commonly used by
// non-native English speakers
rc &= test_sentence ("He runs less quickly than John does.",
"_advmod(run, quickly)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_advmod(quickly, less)\n"+
"than(he, John)\n" +
"_comparative(quickly, run)\n" +
"degree(quickly, comparative)\n");
rc &= test_sentence ("He runs slower than John does.",
"_advmod(run, slow)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"than(he, John)\n" +
"_comparative(slow, run)\n" +
"degree(slow, comparative)\n");
rc &= test_sentence ("He runs more than John.",
"_obj(run, more)\n" +
"_subj(run, he)\n" +
"than(he, John)\n"+
"_comparative(more, run)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He runs less than John.",
"_obj(run, less)\n" +
"_subj(run, he)\n" +
"than(he, John)\n"+
"_comparative(less, run)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("He runs faster than John.",
"than(he, John)\n" +
"_comparative(fast, run)\n" +
"_subj(run, he)\n"+
"_advmod(run, fast)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("He runs more slowly than John.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_advmod(slowly, more)\n"+
"_comparative(slowly, run)\n"+
"_advmod(run, slowly)\n"+
"degree(slowly, comparative)\n");
rc &= test_sentence ("He runs less slowly than John.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_comparative(slowly, run)\n"+
"_advmod(run, slowly)\n"+
"_advmod(slowly, less)\n"+
"degree(slowly, comparative)\n");
rc &= test_sentence ("He runs more miles than John does.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_obj(run, mile)\n"+
"_comparative(mile, run)\n"+
"_quantity(mile, more)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He runs fewer miles than John does.",
"than(he, John)\n" +
"_subj(run, he)\n" +
"_subj(do, John)\n"+
"_obj(run, mile)\n"+
"_comparative(mile, run)\n"+
"_quantity(mile, fewer)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("He runs many more miles than John does.",
"than(he, John)\n" +
"_comparative(mile, run)\n"+
"_obj(run, mile)\n"+
"_subj(run, he)\n" +
"_subj(do, John)\n" +
"_quantity(mile, many)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He runs ten more miles than John.",
"_obj(run, mile)\n"+
"_subj(run, he)\n" +
"than(he, John)\n" +
"_comparative(mile, run)\n"+
"_quantity(mile, ten)\n" +
"numeric-FLAG(ten, T)\n" +
"degree(more, comparative)\n");
rc &= test_sentence ("He runs almost ten more miles than John does.",
"_obj(run, mile)\n"+
"_subj(run, he)\n"+
"_comparative(mile, run)\n"+
"_subj(do, John)\n"+
"than(he, John)\n"+
"_quantity_mod(ten, almost)\n"+
"_quantity(mile, ten)\n"+
"numeric-FLAG(ten, T)\n" +
"degree(more, comparative)\n");
rc &= test_sentence ("He runs more often than John.",
"_subj(run, he)\n"+
"_advmod(often, more)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He runs less often than John.",
"_subj(run, he)\n"+
"_advmod(often, less)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He runs here more often than John.",
"_advmod(run, here)\n"+
"_advmod(often, more)\n"+
"_advmod(run, often)\n"+
"_subj(run, he)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He runs here less often than John.",
"_advmod(run, here)\n"+
"_advmod(often, less)\n"+
"_advmod(run, often)\n"+
"_subj(run, he)\n"+
"_comparative(often, run)\n"+
"than(he, John)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("He is faster than John.",
"than(he, John)\n"+
"_predadj(he, fast)\n"+
"_comparative(fast, he)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("He is faster than John is.",
"than(he, John)\n"+
"_predadj(he, fast)\n"+
"_subj(be, John)\n"+
"_comparative(fast, he)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("His speed is faster than John's.",
"than(speed, be)\n"+
"_predadj(speed, fast)\n"+
"_poss(speed, him)\n"+
"_comparative(fast, speed)\n"+
"degree(fast, comparative)\n");
rc &= test_sentence ("I run more than Ben.",
"_subj(run, I)\n"+
"_obj(run, more)\n"+
"_comparative(more, run)\n"+
"than(I, Ben)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I run less than Ben.",
"_subj(run, I)\n"+
"_obj(run, less)\n"+
"_comparative(less, run)\n"+
"than(I, Ben)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("I run more miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, more)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I run fewer miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, fewer)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("I run 10 more miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, 10)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"numeric-FLAG(10, T)\n" +
"degree(more, comparative)\n");
rc &= test_sentence ("I run 10 fewer miles than Ben.",
"_subj(run, I)\n"+
"_obj(run, mile)\n"+
"_quantity(mile, 10)\n"+
"_comparative(mile, run)\n"+
"than(I, Ben)\n"+
"numeric-FLAG(10, T)\n" +
"degree(fewer, comparative)\n");
rc &= test_sentence ("I run more often than Ben.",
"_subj(run, I)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, more)\n");
rc &= test_sentence ("I run less often than Ben.",
"_subj(run, I)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, less)\n");
rc &= test_sentence ("I run more often than Ben does.",
"_subj(run, I)\n"+
"_subj(do, Ben)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, more)\n");
rc &= test_sentence ("I run less often than Ben does.",
"_subj(run, I)\n"+
"_subj(do, Ben)\n"+
"_advmod(run, often)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(often, less)\n");
rc &= test_sentence ("I run more often than Ben climbs.",
"_subj(run, I)\n"+
"_subj(climb, Ben)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(run, often)\n"+
"_advmod(often, more)\n");
rc &= test_sentence ("I run less often than Ben climbs.",
"_subj(run, I)\n"+
"_subj(climb, Ben)\n"+
"_comparative(often, run)\n"+
"than(I, Ben)\n"+
"degree(often, comparative)\n"+
"_advmod(run, often)\n"+
"_advmod(often, less)\n");
rc &= test_sentence ("I run more races than Ben wins contests.",
"_subj(run, I)\n"+
"_obj(run, race)\n"+
"_subj(win, Ben)\n"+
"_obj(win, contest)\n"+
"_quantity(race, more)\n"+
"_comparative(race, run)\n"+
"than(I, Ben)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I run fewer races than Ben wins contests.",
"_subj(run, I)\n"+
"_obj(run, race)\n"+
"_subj(win, Ben)\n"+
"_obj(win, contest)\n"+
"_quantity(race, fewer)\n"+
"_comparative(race, run)\n"+
"than(I, Ben)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("I have more chairs than Ben.",
"_obj(have, chair)\n"+
"_subj(have, I)\n"+
"than(I, Ben)\n"+
"_comparative(chair, have)\n"+
"_quantity(chair, more)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("I have fewer chairs than Ben.",
"_obj(have, chair)\n"+
"_subj(have, I)\n"+
"than(I, Ben)\n"+
"_comparative(chair, have)\n"+
"_quantity(chair, fewer)\n"+
"degree(fewer, comparative)\n");
rc &= test_sentence ("He earns much more money than I do.",
"_obj(earn, money)\n"+
"_subj(do, I)\n"+
"_subj(earn, he)\n"+
"than(he, I)\n"+
"_comparative(money, earn)\n"+
"_quantity(money, more)\n"+
"_advmod(more, much)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He earns much less money than I do.",
"_obj(earn, money)\n"+
"_subj(do, I)\n"+
"_subj(earn, he)\n"+
"than(he, I)\n"+
"_comparative(money, earn)\n"+
"_quantity(money, less)\n"+
"_advmod(less, much)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("She comes here more often than her husband.",
"_advmod(come, here)\n"+
"_advmod(often, more)\n"+
"_advmod(come, often)\n"+
"_subj(come, she)\n"+
"_poss(husband, her)\n"+
"_comparative(often, come)\n"+
"than(she, husband)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("She comes here less often than her husband.",
"_advmod(come, here)\n"+
"_advmod(often, less)\n"+
"_advmod(come, often)\n"+
"_subj(come, she)\n"+
"_poss(husband, her)\n"+
"_comparative(often, come)\n"+
"than(she, husband)\n"+
"degree(often, comparative)\n");
rc &= test_sentence ("Russian grammar is more difficult than English grammar.",
"_comparative(difficult, grammar)\n"+
"than(grammar, grammar)\n"+
"_amod(grammar, Russian)\n"+ // When link-grammar uses an A link, relex should produce _amod; the next link-grammar version will use A instead of AN here, so this will be updated then.
"_predadj(grammar, difficult)\n"+
"_amod(grammar, English)\n"+
"_advmod(difficult, more)\n"+
"degree(difficult, comparative)\n");
rc &= test_sentence ("Russian grammar is less difficult than English grammar.",
"_comparative(difficult, grammar)\n"+
"than(grammar, grammar)\n"+
"_amod(grammar, Russian)\n"+
"_predadj(grammar, difficult)\n"+
"_amod(grammar, English)\n"+
"_advmod(difficult, less)\n"+
"degree(difficult, comparative)\n");
rc &= test_sentence ("My sister is much more intelligent than me.",
"_amod(much, intelligent)\n"+
"_predadj(sister, intelligent)\n"+
"_poss(sister, me)\n"+
"than(sister, me)\n"+
"_comparative(intelligent, sister)\n"+
"_advmod(intelligent, more)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("My sister is much less intelligent than me.",
"_amod(much, intelligent)\n"+
"_predadj(sister, intelligent)\n"+
"_poss(sister, me)\n"+
"than(sister, me)\n"+
"_comparative(intelligent, sister)\n"+
"_advmod(intelligent, less)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("I find maths lessons more enjoyable than science lessons.",
"_iobj(find, maths)\n"+
"_obj(find, lesson)\n"+
"_subj(find, I)\n"+
"_amod(lesson, enjoyable)\n"+
"_nn(lesson, science)\n"+
"than(maths, science)\n"+
"_comparative(enjoyable, maths)\n"+
"_advmod(enjoyable, more)\n"+
"degree(enjoyable, comparative)\n");
rc &= test_sentence ("I find maths lessons less enjoyable than science lessons.",
"_iobj(find, maths)\n"+
"_obj(find, lesson)\n"+
"_subj(find, I)\n"+
"_amod(lesson, enjoyable)\n"+
"_nn(lesson, science)\n"+
"than(maths, science)\n"+
"_comparative(enjoyable, maths)\n"+
"_advmod(enjoyable, less)\n"+
"degree(enjoyable, comparative)\n");
// Comparatives without more/less terms
rc &= test_sentence ("Her great-grandson is nicer than her great-granddaughter.",
"than(great-grandson, great-granddaughter)\n"+
"_predadj(great-grandson, nice)\n"+
"_poss(great-grandson, her)\n"+
"_poss(great-granddaughter, her)\n"+
"_comparative(nice, great-grandson)\n"+
"degree(nice, comparative)\n");
rc &= test_sentence ("George is cleverer than Norman.",
"than(George, Norman)\n"+
"_predadj(George, clever)\n"+
"_comparative(clever, George)\n"+
"degree(clever, comparative)\n");
rc &= test_sentence ("Kim is taller than Linda.",
"than(Kim, Linda)\n"+
"_predadj(Kim, tall)\n"+
"_comparative(tall, Kim)\n"+
"degree(tall, comparative)\n");
rc &= test_sentence ("Venus is brighter than Mars.",
"than(Venus, Mars)\n"+
"_predadj(Venus, bright)\n"+
"_comparative(bright, Venus)\n"+
"degree(bright, comparative)\n");
rc &= test_sentence ("Mary is shorter than Jane.",
"than(Mary, Jane)\n"+
"_predadj(Mary, short)\n"+
"_comparative(short, Mary)\n"+
"degree(short, comparative)\n");
rc &= test_sentence ("I am happier than you.",
"than(I, you)\n"+
"_predadj(I, happy)\n"+
"_comparative(happy, I)\n"+
"degree(happy, comparative)");
rc &= test_sentence ("His house is bigger than hers.",
"than(house, hers)\n"+
"_predadj(house, big)\n"+
"_poss(house, him)\n"+
"_comparative(big ,house)\n"+
"degree(big, comparative)");
rc &= test_sentence ("She is two years older than me.",
"_obj(is, year)\n"+
"_amod(years, old)\n"+
"_quantity(year, two)\n"+
"numeric-FLAG(two, T)\n" +
"than(she, me)\n"+
"_comparative(old, she)\n"+
"degree(old, comparative)");
rc &= test_sentence ("New York is much bigger than Boston.",
"_subj(is, New_York)\n"+
"_amod(much, big)\n"+
"than(New_York, Boston)\n"+
"_comparative(big, New_York)\n"+
"degree(big, comparative)");
rc &= test_sentence ("He is a better player than Ronaldo.",
"_obj(be, player)\n"+
"_subj(be, he)\n"+
"_amod(player, good)\n"+
"than(he, Ronaldo)\n"+
"_comparative(good, he)\n"+
"degree(good, comparative)");
rc &= test_sentence ("France is a bigger country than Britain.",
"_obj(is, country)\n"+
"_subj(is, France)\n"+
"_amod(country, big)\n"+
"than(France, Britain)\n"+
"_comparative(big, France)\n"+
"degree(big, comparative)\n");
rc &= test_sentence ("That joke was funnier than his joke.",
"_predadj(joke, funny)\n"+
"than(joke, joke)\n"+
"_det(joke, that)\n"+
"_poss(joke, him)\n"+
"_comparative(funny, joke)\n"+
"degree(funny, comparative)");
rc &= test_sentence ("Our car is bigger than your car.",
"than(car, car)\n"+
"_predadj(car, big)\n"+
"_poss(car, us)\n"+
"_det(car, you)\n"+
"_poss(car, you)\n"+
"_comparative(big, car)\n"+
"degree(big, comparative)");
// Sentences that still need to be checked
rc &= test_sentence ("This computer is better than that one.",
"than(computer, one)\n"+
"_det(computer, this)\n"+
"_predadj(computer, good)\n"+
"_det(one, that)\n"+
"degree(good, comparative)\n"+
"_comparative(good, computer)\n");
rc &= test_sentence ("He's simpler than I thought.",
"than(he, I)\n"+
"_subj(think, I)\n"+
"_comparative(simple, he)\n"+
"_predadj(he, simple)\n"+
"degree(simple, comparative)\n");
rc &= test_sentence ("She's stronger at chess than I am.",
"at(strong, chess)\n"+
"than(she, I)\n"+
"_predadj(she, strong)\n"+
"degree(strong, comparative)\n"+
"_comparative(strong, she)\n");
rc &= test_sentence ("She's prettier than her mother.",
"_predadj(she, pretty)\n"+
"than(she, mother)\n"+
"_poss(mother, her)\n"+
"_comparative(pretty, she)\n"+
"degree(pretty, comparative)\n");
rc &= test_sentence ("This exam was more difficult than the other.",
"than(exam, other)\n"+
"_det(exam, this)\n"+
"_predadj(exam, difficult)\n"+
"_advmod(difficult, more)\n"+
"_comparative(difficult, exam)\n"+
"degree(difficult, comparative)\n");
rc &= test_sentence ("It's much colder today than it was yesterday.",
"_subj(be, it)\n"+
"than(today, yesterday)\n"+
"_advmod(cold, today)\n"+
"_advmod(cold, yesterday)\n"+
"_predadj(it, cold)\n"+
"_comparative(cold, it)\n"+
"degree(cold, comparative)\n");
rc &= test_sentence ("This grammar topic is easier than most others.",
"than(topic, others)\n"+
"_det(topic, this)\n"+
"_nn(topic, grammar)\n"+
"_predadj(topic, easy)\n"+
"_quantity(others, most)\n"+
"_comparative(easy, topic)\n"+
"degree(easy, comparative)\n");
rc &= test_sentence ("I find science more difficult than mathematics.",
"_obj(find, science)\n"+
"_subj(find, I)\n"+
"_advmod(difficult, more)\n"+
"than(science, mathematics)\n"+
"_comparative(difficult, science)\n"+
"degree(difficult, comparative)\n");
// one entity, two or more features
rc &= test_sentence ("He is more intelligent than attractive.",
"than(intelligent, attractive)\n"+
"_predadj(he, intelligent)\n"+
"_advmod(intelligent, more)\n"+
"_comparative(intelligent, he)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("He is less intelligent than attractive.",
"than(intelligent, attractive)\n"+
"_predadj(he, intelligent)\n"+
"_advmod(intelligent, less)\n"+
"_comparative(intelligent, he)\n"+
"degree(intelligent, comparative)\n");
rc &= test_sentence ("The dog was more hungry than angry.",
"_predadj(dog, hungry)\n"+
"than(hungry, angry)\n"+
"_advmod(hungry, more)\n"+
"_comparative(hungry, dog)\n"+
"degree(hungry, comparative)\n");
rc &= test_sentence ("The dog was less hungry than angry.",
"_predadj(dog, hungry)\n"+
"than(hungry, angry)\n"+
"_advmod(hungry, less)\n"+
"_comparative(hungry, dog)\n"+
"degree(hungry, comparative)\n");
rc &= test_sentence ("He did it more quickly than carefully.",
"_obj(do, it)\n"+
"_subj(do, he)\n"+
"than(quickly, carefully)\n"+
"_advmod(do, quickly)\n"+
"_advmod(quickly, more)\n"+
"_comparative(quickly, do)\n"+
"degree(quickly, comparative)\n");
rc &= test_sentence ("He did it less quickly than carefully.",
"_obj(do, it)\n"+
"_subj(do, he)\n"+
"than(quickly, carefully)\n"+
"_advmod(do, quickly)\n"+
"_advmod(quickly, less)\n"+
"_comparative(quickly, do)\n"+
"degree(quickly, comparative)\n");
rc &= test_sentence ("He has more money than time.",
"_obj(have, money)\n"+
"_subj(have, he)\n"+
"than(money, time)\n"+
"_quantity(money, more)\n"+
"_comparative(money, have)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He has less money than time.",
"_obj(have, money)\n"+
"_subj(have, he)\n"+
"than(money, time)\n"+
"_quantity(money, less)\n"+
"_comparative(money, have)\n"+
"degree(less, comparative)\n");
rc &= test_sentence ("He plays more for money than for pleasure.",
"_subj(play, he)\n"+
"_obj(play, more)\n"+
"for(play, money)\n"+
"for(than, pleasure)\n"+
"than(money, pleasure)\n"+
"_comparative(more, play)\n"+
"degree(more, comparative)\n");
rc &= test_sentence ("He plays less for money than for pleasure.",
"_subj(play, he)\n"+
"_obj(play, less)\n"+
"for(play, money)\n"+
"for(than, pleasure)\n"+
"than(money, pleasure)\n"+
"_comparative(less, play)\n"+
"degree(less, comparative)\n");
// two entities, two features
rc &= test_sentence ("Jack is more ingenious than Ben is crazy.",
"_predadj(Jack, ingenious)\n"+
"_predadj(Ben, crazy)\n"+
"_advmod(ingenious, more)\n"+
"_comparative(ingenious, Jack)\n"+
"than(Jack, Ben)\n"+
"than1(ingenious, crazy)\n"+
"degree(ingenious, comparative)\n");
rc &= test_sentence ("Jack is less ingenious than Ben is crazy.",
"_predadj(Jack, ingenious)\n"+
"_predadj(Ben, crazy)\n"+
"_advmod(ingenious, less)\n"+
"_comparative(ingenious, Jack)\n"+
"than(Jack, Ben)\n"+
"than1(ingenious, crazy)\n"+
"degree(ingenious, comparative)\n");
// two entities, two features, without more/less
rc &= test_sentence ("I slept longer than he worked",
"_subj(sleep, I)\n"+
"_subj(work, he)\n"+
"_advmod(sleep, long)\n"+
"than(I, he)\n"+
"than1(sleep, work)\n"+
"_comparative(long, sleep)\n"+
"degree(long, comparative)\n");
report(rc, "Comparatives");
return rc;
}
public boolean test_equatives()
{
boolean rc = true;
// Equative: two entities, one feature
rc &= test_sentence ("Amen's hair is as long as Ben's.",
"_poss(hair, Amen)\n"+
"_predadj(hair, long)\n"+
"as(long, Ben)\n"+
"than(Amen, Ben)\n");
rc &= test_sentence ("Amen’s hair is same as Ben’s.",
"_poss(hair, Amen)\n"+
"_predadj(hair, same)\n"+
"as(same, Ben)\n"+
"than(Amen, Ben)\n");
rc &= test_sentence ("Jack’s hair color is similar to that of Ben’s.",
"_poss(color, Jack)\n"+
"_nn(color, hair)\n"+
"_predadj(color, similar)\n"+
"of(that, Ben)\n"+
"to(similar, that)\n"+
"than(Jack, Ben)\n");
rc &= test_sentence ("Jack’s hair color is similar to Ben's",
"_poss(color, Jack)\n"+
"_nn(color, hair)\n"+
"_predadj(color, similar)\n"+
"to(similar, Ben)\n"+
"than(Jack, Ben)\n");
rc &= test_sentence ("Jack is as intelligent as Ben.",
"_predadj(Jack, intelligent)\n"+
"as(intelligent, Ben)\n"+
"than(Jack, Ben)\n");
rc &= test_sentence ("The book’s color is same as that of the pen’s.",
"_poss(color, book)\n"+
"_predadj(color, same)\n"+
"of(that, pen)\n"+
"as(same, that)\n"+
"than(book, pen)\n");
rc &= test_sentence ("The snail is running exactly as fast as the cheetah.",
"_predadj(snail, run)\n"+
"as(run, cheetah)\n"+
"_advmod(fast, exactly)\n"+
"than(snail, cheetah)\n");
// one entity, one feature, through time
rc &= test_sentence ("The coffee tastes the same as it did last year.",
"_subj(taste, coffee)\n"+
"_obj(taste, same)\n"+
"_obj(do, year)\n"+
"_subj(do, it)\n"+
"as(taste, do)\n"+
"_amod(year, last)\n");
rc &= test_sentence ("The coffee tastes as it did last year.",
"_subj(taste, coffee)\n"+
"_obj(do, year)\n"+
"_subj(do, it)\n"+
"as(taste, do)\n"+
"_amod(year, last)\n");
rc &= test_sentence ("Mike runs as fast as he did last year.",
"_subj(do, he)\n"+
"_subj(run, Mike)\n"+
"as(fast, he)\n"+
"_advmod(run, fast)\n"+
"_advmod(do, year)\n"+
"_amod(year, last)\n"+
"than(Mike, he)\n");
rc &= test_sentence ("The kick was as soft as the first.",
"_predadj(kick, soft)\n"+
"as(kick, first)\n");
rc &= test_sentence ("He is as smart as I ever expected him to be.",
"_predadj(he, smart)\n"+
"_subj(expect, I)\n"+
"_obj(expect, him)\n"+
"as(smart, expect)\n"+
"_advmod(expect, ever)\n"+
"_to-do(smart, be)\n");
report(rc, "Equatives");
return rc;
}
public boolean test_conjunctions()
{
boolean rc = true;
// conjoined verbs
rc &= test_sentence ("Scientists make observations and ask questions.",
"_obj(make, observation)\n" +
"_obj(ask, question)\n" +
"_subj(make, scientist)\n" +
"_subj(ask, scientist)\n" +
"conj_and(make, ask)\n");
// conjoined nouns
rc &= test_sentence ("She is a student and an employee.",
"_obj(be, student)\n" +
"_obj(be, employee)\n" +
"_subj(be, she)\n" +
"conj_and(student, employee)\n");
// conjoined adjectives
rc &= test_sentence ("I hailed a black and white taxi.",
"_obj(hail, taxi)\n" +
"_subj(hail, I)\n" +
"_amod(taxi, black)\n" +
"_amod(taxi, white)\n" +
"conj_and(black, white)\n");
// conjoined adverbs
rc &= test_sentence ("She ran quickly and quietly.",
"_advmod(run, quickly)\n" +
"_advmod(run, quietly)\n" +
"_subj(run, she)\n" +
"conj_and(quickly, quietly)\n");
// adjectival modifiers on conjoined subject
rc &= test_sentence ("The big truck and the little car collided.",
"_amod(car, little)\n" +
"_amod(truck, big)\n" +
"_subj(collide, truck)\n" +
"_subj(collide, car)\n" +
"conj_and(truck, car)\n");
// verbs with modifiers
rc &= test_sentence ("We ate dinner at home and went to the movies.",
"_obj(eat, dinner)\n" +
"conj_and(eat, go)\n" +
"at(eat, home)\n" +
"_subj(eat, we)\n" +
"to(go, movie)\n" +
"_subj(go, we)\n");
// verb with more modifiers
rc &= test_sentence ("We ate a late dinner at home and went out to the movies afterwards.",
"_obj(eat, dinner)\n" +
"conj_and(eat, go_out)\n" +
"at(eat, home)\n" +
"_subj(eat, we)\n" +
"to(go_out, movie)\n" +
"_advmod(go_out, afterwards)\n" +
"_subj(go_out, we)\n" +
"_amod(dinner, late)\n");
// conjoined ditransitive verbs
rc &= test_sentence ("She baked him a cake and sang him a song.",
"_iobj(sing, him)\n" +
"_obj(sing, song)\n" +
"_subj(sing, she)\n" +
"_iobj(bake, him)\n" +
"_obj(bake, cake)\n" +
"conj_and(bake, sing)\n" +
"_subj(bake, she)\n");
// conjoined adverbs with modifiers
rc &= test_sentence ("she ran very quickly and extremely quietly.",
"_advmod(run, quickly)\n" +
"_advmod(run, quietly)\n" +
"_subj(run, she)\n" +
"_advmod(quietly, extremely)\n" +
"conj_and(quickly, quietly)\n" +
"_advmod(quickly, very)\n");
// conjoined adverbs without modifiers
rc &= test_sentence ("She handled it quickly and gracefully.",
"_obj(handle, quickly)\n" +
"_obj(handle, gracefully)\n" +
"_advmod(handle, quickly)\n" +
"_advmod(handle, gracefully)\n" +
"_subj(handle, she)\n" +
"conj_and(quickly, gracefully)\n");
// modifiers on conjoined adjectives
rc &= test_sentence ("He had very long and very white hair.",
"_obj(have, hair)\n" +
"_subj(have, he)\n" +
"_amod(hair, long)\n" +
"_amod(hair, white)\n" +
"_advmod(white, very)\n" +
"conj_and(long, white)\n" +
"_advmod(long, very)\n");
// adjectival modifiers on conjoined object
rc &= test_sentence ("The collision was between the little car and the big truck.",
"_pobj(between, car)\n" +
"_pobj(between, truck)\n" +
"_psubj(between, collision)\n" +
"_amod(truck, big)\n" +
"_amod(car, little)\n" +
"conj_and(car, truck)\n");
// Names with modifiers and conjunction
rc &= test_sentence ("Big Tom and Angry Sue went to the movies.",
"to(go, movie)\n" +
"_subj(go, Big_Tom)\n" +
"_subj(go, Angry_Sue)\n" +
"conj_and(Big_Tom, Angry_Sue)\n");
// Correlative conjunction
rc &= test_sentence ("I could use neither the lorry nor the van.",
"_to-do(could, use)\n"+
"_quantity(lorry, neither)\n"+
"conj_neither_nor(lorry, van)\n"+
"_obj(use, lorry)\n"+
"_obj(use, van)\n"+
"_subj(use, I)\n");
report(rc, "Conjunction");
return rc;
}
public boolean test_extraposition()
{
boolean rc = true;
rc &= test_sentence ("The woman who lives next door is a registered nurse.",
"_obj(be, nurse)\n" +
"_subj(be, woman)\n" +
"_amod(nurse, registered)\n" +
"_advmod(live, next_door)\n" +
"_subj(live, woman)\n" +
"who(woman, live)\n");
rc &= test_sentence ("A player who is injured has to leave the field.",
"_to-do(have, leave)\n" +
"_subj(have, player)\n" +
"_obj(leave, field)\n" +
"_predadj(player, injured)\n" +
"who(player, injured)\n" );
rc &= test_sentence ("Pizza, which most people love, is not very healthy.",
"_advmod(very, not)\n" +
"_advmod(healthy, very)\n" +
"_obj(love, Pizza)\n" +
"_quantity(people, most)\n" +
"which(Pizza, love)\n" +
"_subj(love, people)\n" +
"_predadj(Pizza, healthy)\n" );
rc &= test_sentence ("The restaurant which belongs to my aunt is very famous.",
"_advmod(famous, very)\n" +
"to(belong, aunt)\n" +
"_subj(belong, restaurant)\n" +
"_poss(aunt, me)\n" +
"which(restaurant, belong)\n" +
"_predadj(restaurant, famous)\n");
rc &= test_sentence ("The books which I read in the library were written by Charles Dickens.",
"_obj(write, book)\n" +
"by(write, Charles_Dickens)\n" +
"_obj(read, book)\n" +
"in(read, library)\n" +
"_subj(read, I)\n" +
"which(book, read)\n");
rc &= test_sentence("This is the book whose author I met in a library.",
"_obj(be, book)\n" +
"_subj(be, this)\n" +
"_obj(meet, author)\n" +
"in(meet, library)\n" +
"_subj(meet, I)\n" +
"whose(book, author)\n");
rc &= test_sentence("The book that Jack lent me is very boring.",
"_advmod(boring, very)\n" +
"_iobj(lend, book)\n" +
"_obj(lend, me)\n" +
"_subj(lend, Jack)\n" +
"that(book, lend)\n" +
"_predadj(book, boring)\n");
rc &= test_sentence("They ate a special curry which was recommended by the restaurant’s owner.",
"_obj(eat, curry)\n" +
"_subj(eat, they)\n" +
"_obj(recommend, curry)\n" +
"by(recommend, owner)\n" +
"_poss(owner, restaurant)\n" +
"which(curry, recommend)\n" +
"_amod(curry, special)\n");
rc &= test_sentence("The dog who Jack said chased me was black.",
"_obj(chase, me)\n" +
"_subj(chase, dog)\n" +
"_subj(say, Jack)\n" +
"_predadj(dog, black)\n" +
"who(dog, chase)\n");
rc &= test_sentence("Jack, who hosted the party, is my cousin.",
"_obj(be, cousin)\n" +
"_subj(be, Jack)\n" +
"_poss(cousin, me)\n" +
"_obj(host, party)\n" +
"_subj(host, Jack)\n" +
"who(Jack, host)\n");
rc &= test_sentence("Jack, whose name is in that book, is the student near the window.",
"near(be, window)\n" +
"_obj(be, student)\n" +
"_subj(be, Jack)\n" +
"_pobj(in, book)\n" +
"_psubj(in, name)\n" +
"_det(book, that)\n" +
"whose(Jack, name)\n");
rc &= test_sentence("Jack stopped the police car that was driving fast.",
"_obj(stop, car)\n" +
"_subj(stop, Jack)\n" +
"_advmod(drive, fast)\n" +
"_predadj(car, drive)\n" +
"that(car, drive)\n" +
"_nn(car, police)\n");
rc &= test_sentence("Just before the crossroads, the car was stopped by a traffic sign that stood on the street.",
"_obj(stop, car)\n" +
"by(stop, sign)\n" +
"_advmod(stop, just)\n" +
"on(stand, street)\n" +
"_subj(stand, sign)\n" +
"that(sign, stand)\n" +
"_nn(sign, traffic)\n" +
"before(just, crossroads)\n");
report(rc, "Extrapostion");
return rc;
}
// Added by Matthew
public boolean test_inquisitives()
{
boolean rc = true;
rc &= test_sentence ("What is Socrates?",
"_obj(be, Socrates)\n" +
"_subj(be, _$qVar)\n");
rc &= test_sentence ("Who is the teacher?",
"_obj(be, teacher)\n" +
"_subj(be, _$qVar)\n");
rc &= test_sentence ("Who is a man?",
"_obj(be, man)\n" +
"_subj(be, _$qVar)\n");
rc &= test_sentence ("Who told you that bullshit?",
"_iobj(tell, you)\n" +
"_obj(tell, bullshit)\n" +
"_subj(tell, _$qVar)\n" +
"_det(bullshit, that)\n");
rc &= test_sentence ("Who told that story to the police?",
"to(tell, police)\n" +
"_obj(tell, story)\n" +
"_subj(tell, _$qVar)\n" +
"_det(story, that)\n");
rc &= test_sentence ("What gives you that idea?",
"_iobj(give, you)\n" +
"_obj(give, idea)\n" +
"_subj(give, _$qVar)\n" +
"_det(idea, that)\n");
rc &= test_sentence ("What gave that idea to the police?",
"to(give, police)\n" +
"_obj(give, idea)\n" +
"_subj(give, _$qVar)\n" +
"_det(idea, that)\n");
rc &= test_sentence ("What did you tell the fuzz?",
"_iobj(tell, fuzz)\n" +
"_obj(tell, _$qVar)\n" +
"_subj(tell, you)\n");
rc &= test_sentence ("What did you give to Mary?",
"to(give, Mary)\n" +
"_obj(give, _$qVar)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("Who did you give the slavers?",
"_subj(give, you)\n");
rc &= test_sentence ("Who did you sell to the slavers?",
"to(sell, to)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("To whom did you sell the children?",
"to(sell, _$qVar)\n" +
"_obj(sell, child)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("To what do we owe the pleasure?",
"to(owe, _$qVar)\n" +
"_obj(owe, pleasure)\n" +
"_subj(owe, we)\n");
rc &= test_sentence ("Who did you sell the children to?",
"to(sell, to)\n" +
"_obj(sell, child)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("What bothers you?",
"_obj(bother, you)\n" +
"_subj(bother, _$qVar)\n");
rc &= test_sentence ("Who programmed you?",
"_obj(program, you)\n" +
"_subj(program, _$qVar)\n");
rc &= test_sentence ("What is on the table?",
"_pobj(on, table)\n" +
"_psubj(on, _$qVar)\n");
rc &= test_sentence ("What did you say?",
"_obj(say, _$qVar)\n" +
"_subj(say, you)\n");
rc &= test_sentence ("Who do you love?",
"_obj(love, _$qVar)\n" +
"_subj(love, you)\n");
rc &= test_sentence ("What is for dinner?",
"_pobj(for, dinner)\n" +
"_psubj(for, _$qVar)\n");
rc &= test_sentence ("Who's on first?",
"_pobj(on, first)\n" +
"_psubj(on, _$qVar)\n");
rc &= test_sentence ("Who farted?",
"_subj(fart, _$qVar)\n");
rc &= test_sentence ("What is happening?",
"_subj(happen, _$qVar)\n");
rc &= test_sentence ("Who is correct?",
"_predadj(_$qVar, correct)\n");
rc &= test_sentence ("What is right?",
"_predadj(_$qVar, right)\n");
rc &= test_sentence ("What are you doing?",
"_subj(_$qVar, you)\n");
rc &= test_sentence ("Are you the one?",
"_obj(be, one)\n" +
"_subj(be, you)\n");
rc &= test_sentence ("Are you mad?",
"_predadj(you, mad)\n");
rc &= test_sentence ("Is the book under the table?",
"under(book, table)\n" +
"_subj(be, book)\n");
rc &= test_sentence ("Does he seem mad?",
"_to-be(seem, mad)\n" +
"_subj(seem, he)\n");
rc &= test_sentence ("Does she want to help us?",
"_obj(help, us)\n" +
"_to-do(want, help)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Does she want you to help us?",
"_obj(help, us)\n" +
"_subj(help, you)\n" +
"_to-do(want, help)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Was she good enough to help?",
"_predadj(she, good)\n" +
"_to-do(good, help)\n");
rc &= test_sentence ("Must she be able to sing?",
"_to-do(able, sing)\n" +
"_predadj(she, able)\n");
rc &= test_sentence ("Does she want to sing?",
"_to-do(want, sing)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Have you slept?",
"_subj(sleep, you)\n");
rc &= test_sentence ("Will you sleep?",
"_subj(sleep, you)\n");
rc &= test_sentence ("Did you sleep?",
"_subj(sleep, you)\n");
rc &= test_sentence ("Did you eat the leftover baba-ganoush?",
"_obj(eat, leftover)\n" +
"_to-be(eat, baba-ganoush)\n" +
"_subj(eat, you)\n");
rc &= test_sentence ("Did you give her the money?",
"_iobj(give, her)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("Did you give the money to her?",
"to(give, her)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("The book is under the table?",
"_pobj(under, table)\n" +
"_psubj(under, book)\n");
rc &= test_sentence ("Maybe she eats lunch.",
"_obj(eat, lunch)\n" +
"_advmod(eat, maybe)\n" +
"_subj(eat, she)\n");
rc &= test_sentence ("Perhaps she is nice.",
"_advmod(nice, perhaps)\n" +
"_predadj(she, nice)\n");
rc &= test_sentence ("She wants to help John.",
"_to-do(want, help)\n" +
"_subj(want, she)\n" +
"_obj(help, John)\n");
rc &= test_sentence ("She wants you to help us.",
"_to-do(want, help)\n" +
"_subj(want, she)\n" +
"_obj(help, us)\n" +
"_subj(help, you)\n");
rc &= test_sentence ("She is nice to help with the project.",
"with(help, project)\n" +
"_to-do(nice, help)\n" +
"_predadj(she, nice)\n");
rc &= test_sentence ("She must be able to sing.",
"_to-do(able, sing)\n" +
"_predadj(she, able)\n");
rc &= test_sentence ("She must need to sing?",
"_to-do(need, sing)\n" +
"_subj(need, she)\n");
rc &= test_sentence ("She must want to sing?",
"_to-do(want, sing)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("She wants to sing.",
"_to-do(want, sing)\n" +
"_subj(want, she)\n");
rc &= test_sentence ("Where do you live?",
"_%atLocation(live, _$qVar)\n" +
"_subj(live, you)\n");
rc &= test_sentence ("Where did you eat dinner?",
"_%atLocation(eat, _$qVar)\n" +
"_obj(eat, dinner)\n" +
"_subj(eat, you)\n");
rc &= test_sentence ("Where is the party?",
"_%atLocation(_%copula, _$qVar)\n" +
"_subj(_%copula, party)\n");
rc &= test_sentence ("Where will she be happy?",
"_%atLocation(happy, _$qVar)\n" +
"_predadj(she, happy)\n");
rc &= test_sentence ("When did jazz die?",
"_%atTime(die, _$qVar)\n" +
"_subj(die, jazz)\n");
rc &= test_sentence ("When did you bake the cake?",
"_%atTime(bake, _$qVar)\n" +
"_obj(bake, cake)\n" +
"_subj(bake, you)\n");
rc &= test_sentence ("When did you give him the money?",
"_iobj(give, him)\n" +
"_%atTime(give, _$qVar)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("When is the party?",
"_%atTime(_%copula, _$qVar)\n" +
"_subj(_%copula, party)\n");
rc &= test_sentence ("Why do you live?",
"_subj(live, you)\n");
rc &= test_sentence ("Why do you like terrible music?",
"_obj(like, music)\n" +
"_subj(like, you)\n" +
"_amod(music, terrible)\n");
rc &= test_sentence ("Why are you such a fool?",
"_obj(be, fool)\n" +
"_subj(be, you)\n");
rc &= test_sentence ("How did you sleep?",
"how(sleep, _$qVar)\n" +
"_subj(sleep, you)\n");
rc &= test_sentence ("How was the party?",
"how(_%copula, _$qVar)\n" +
"_subj(_%copula, party)\n");
rc &= test_sentence ("How is your food?",
"_poss(food, you)\n" +
"how(_%copula, _$qVar)\n" +
"_subj(_%copula, food)\n");
rc &= test_sentence ("How much money does it cost?",
"_obj(cost, money)\n" +
"_subj(cost, it)\n" +
"_quantity(money, _$qVar)\n");
rc &= test_sentence ("How many books have you read?",
"_obj(read, book)\n" +
"_subj(read, you)\n" +
"_quantity(book, _$qVar)\n");
rc &= test_sentence ("How fast does it go?",
"_advmod(fast, _$qVar)\n" +
"_subj(go, it)\n");
rc &= test_sentence ("Which girl do you like?",
"_obj(like, girl)\n" +
"_subj(like, you)\n" +
"_quantity(girl, _$qVar)\n");
rc &= test_sentence ("Which girl likes you?",
"_obj(like, you)\n" +
"_subj(like, girl)\n" +
"_quantity(girl, _$qVar)\n");
rc &= test_sentence ("Which girl is crazy?",
"_quantity(girl, _$qVar)\n" +
"_predadj(girl, crazy)\n");
rc &= test_sentence ("The books were written by Charles Dickens.",
"_obj(write, book)\n" +
"by(write, Charles_Dickens)\n");
rc &= test_sentence ("The books are published.",
"_obj(publish, book)\n");
rc &= test_sentence ("I did my homework, and I went to school.",
"_obj(do, homework)\n" +
"_subj(do, I)\n" +
"to(go, school)\n" +
"_subj(go, I)\n" +
"_poss(homework, me)\n");
rc &= test_sentence ("John and Madison eat the cake.",
"_obj(eat, cake)\n" +
"_subj(eat, John)\n" +
"_subj(eat, Madison)\n" +
"conj_and(John, Madison)\n");
rc &= test_sentence ("Joan is poor but happy.",
"_predadj(Joan, poor)\n" +
"_predadj(Joan, happy)\n");
rc &= test_sentence ("I think that dogs can fly.",
"that(think, fly)\n" +
"_subj(think, I)\n" +
"_subj(fly, dog)\n");
rc &= test_sentence ("He is glad that she won.",
"that(glad, win)\n" +
"_subj(win, she)\n" +
"_predadj(he, glad)\n");
rc &= test_sentence ("He ran so quickly that he flew.",
"_advmod(quickly, so)\n" +
"_subj(fly, he)\n" +
"that(run, fly)\n" +
"_advmod(run, quickly)\n" +
"_subj(run, he)\n");
rc &= test_sentence ("I had dinner at 6 pm",
"_obj(have, dinner)\n" +
"at(have, pm)\n" +
"_subj(have, I)\n" +
"_time(pm, 6)\n");
rc &= test_sentence ("I went to sleep at 1 am",
"to(go, sleep)\n" +
"_subj(go, I)\n" +
"_time(am, 1)\n" +
"at(sleep, am)\n");
rc &= test_sentence ("Who farted?",
"_subj(fart, _$qVar)\n");
rc &= test_sentence ("What happened?",
"_subj(happen, _$qVar)\n");
rc &= test_sentence ("What killed him?",
"_obj(kill, him)\n" +
"_subj(kill, _$qVar)\n");
rc &= test_sentence ("Who ate the pizza?",
"_obj(eat, pizza)\n" +
"_subj(eat, _$qVar)\n");
rc &= test_sentence ("What gave you that idea?",
"_iobj(give, you)\n" +
"_obj(give, idea)\n" +
"_subj(give, _$qVar)\n" +
"_det(idea, that)\n");
rc &= test_sentence ("Who told you that?",
"_iobj(tell, you)\n" +
"_obj(tell, that)\n" +
"_subj(tell, _$qVar)\n");
rc &= test_sentence ("What is for dinner?",
"_pobj(for, dinner)\n" +
"_psubj(for, _$qVar)\n");
rc &= test_sentence ("Who's on first?",
"_pobj(on, first)\n" +
"_psubj(on, _$qVar)\n");
rc &= test_sentence ("Who are you?",
"_subj(_%copula, you)\n");
rc &= test_sentence ("Who do you love?",
"_obj(love, _$qVar)\n" +
"_subj(love, you)\n");
rc &= test_sentence ("What do you think?",
"_obj(think, _$qVar)\n" +
"_subj(think, you)\n");
rc &= test_sentence ("To whom did you sell the children?",
"to(sell, _$qVar)\n" +
"_obj(sell, child)\n" +
"_subj(sell, you)\n");
rc &= test_sentence ("Why did you give him the money?",
"_iobj(give, him)\n" +
"_obj(give, money)\n" +
"_subj(give, you)\n");
rc &= test_sentence ("Why are you so stupid?",
"_advmod(stupid, so)\n" +
"_predadj(you, stupid)\n");
rc &= test_sentence ("How did you like the movie?",
"_obj(like, movie)\n" +
"how(like, _$qVar)\n" +
"_subj(like, you)\n");
rc &= test_sentence ("How did you send him the message?",
"_iobj(send, him)\n" +
"_obj(send, message)\n" +
"how(send, _$qVar)\n" +
"_subj(send, you)\n");
report(rc, "Inquisitives");
return rc;
}
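// Command-line entry point: runs the test-class setup and then the full suite.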
public static void main(String[] args)
{
setUpClass();
TestRelEx ts = new TestRelEx();
ts.runTests();
}
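// Runs every test group, reports the overall pass/fail counts, then lists any sentences that failed.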
@Test
public void runTests() {
TestRelEx ts = this;
boolean rc = true;
rc &= ts.test_determiners();
rc &= ts.test_time();
rc &= ts.test_comparatives();
rc &= ts.test_equatives();
rc &= ts.test_extraposition();
rc &= ts.test_conjunctions();
rc &= ts.test_inquisitives();
if (rc) {
System.err.println("Tested " + ts.pass + " sentences, test passed OK");
} else {
System.err.println("Test failed\n\t" +
ts.fail + " sentences failed\n\t" +
ts.pass + " sentences passed");
}
System.err.println("******************************");
System.err.println("Failed test sentences on Relex");
System.err.println("******************************");
if (sentfail.isEmpty())
System.err.println("All test sentences passed");
for(String temp : sentfail){
System.err.println(temp);
}
System.err.println("******************************\n");
}
}
|
more broken stuff...
|
src/java_test/relex/test/TestRelEx.java
|
more broken stuff...
|
|
Java
|
apache-2.0
|
a8dbb48ebfc7a4b3e23e5fd544e3ddc54dd47132
| 0
|
aneznamova/java_pft
|
package ru.stqa.ptf.sandbox;
public class MyFirstProgram {
public static void main(String[] args) {
System.out.println("Hello, world!");
}
}
|
sandbox/src/main/java/ru/stqa/ptf/sandbox/MyFirstProgram.java
|
package ru.stqa.ptf.sandbox;
public class MyFirstProgram {
public static void main(String[] args) { System.out.println("Hello, world!"); }
}
|
changed format settings too
|
sandbox/src/main/java/ru/stqa/ptf/sandbox/MyFirstProgram.java
|
changed format settings too
|
|
Java
|
apache-2.0
|
6cff74dd6712a9dc6aa062913096b6f55395f776
| 0
|
MC-U-Team/U-Team-Core,MC-U-Team/U-Team-Core
|
package info.u_team.u_team_test.data.provider;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
public class TestLanguagesProvider extends CommonLanguagesProvider {
public TestLanguagesProvider(GenerationData data) {
super(data);
}
@Override
public void addTranslations() {
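// English display names for the test mod's item group, items, blocks, entity type, enchantment, effect, potion and death messages, and biome.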
add(TestItemGroups.GROUP, "UTeamTest Tab");
addItem(TestItems.BASIC, "Basic Item");
add(TestItems.BASIC.get().getTranslationKey() + ".outofrange", "\u00A74Out of range");
addItem(TestItems.BETTER_ENDERPEARL, "Better Enderpearl");
addItem(TestItems.BASIC_FOOD, "Basic Food");
addItem(TestItems.BASIC_ARMOR.getHelmet(), "Basic Helmet");
addItem(TestItems.BASIC_ARMOR.getChestplate(), "Basic Chestplate");
addItem(TestItems.BASIC_ARMOR.getLeggings(), "Basic Leggings");
addItem(TestItems.BASIC_ARMOR.getBoots(), "Basic Boots");
addItem(TestItems.BASIC_TOOL.getAxe(), "Basic Axe");
addItem(TestItems.BASIC_TOOL.getHoe(), "Basic Hoe");
addItem(TestItems.BASIC_TOOL.getPickaxe(), "Basic Pickaxe");
addItem(TestItems.BASIC_TOOL.getShovel(), "Basic Shovel");
addItem(TestItems.BASIC_TOOL.getSword(), "Basic Sword");
addBlock(TestBlocks.BASIC, "Basic Block");
addBlock(TestBlocks.BASIC_TILEENTITY, "Tile Entity Block");
addBlock(TestBlocks.BASIC_ENERGY_CREATOR, "Energy Creator");
addBlock(TestBlocks.BASIC_FLUID_INVENTORY, "Fluid Inventory");
addEntityType(TestEntityTypes.BETTER_ENDERPEARL, "Better Enderpearl");
addEnchantment(TestEnchantments.AUTO_SMELT, "Auto Smelt");
addEffect(TestEffects.RADIATION, "Radiation");
add("death.attack.radiation", "%1$s dies of radiation");
add("item.minecraft.potion.effect.radiation", "Potion of Radiation");
add("item.minecraft.splash_potion.effect.radiation", "Splash Potion of Radiation");
add("item.minecraft.lingering_potion.effect.radiation", "Lingering Potion of Radiation");
addBiome(TestBiomes.BASIC, "Basic");
}
}
|
src/test/java/info/u_team/u_team_test/data/provider/TestLanguagesProvider.java
|
package info.u_team.u_team_test.data.provider;
import info.u_team.u_team_core.data.*;
import info.u_team.u_team_test.init.*;
public class TestLanguagesProvider extends CommonLanguagesProvider {
public TestLanguagesProvider(GenerationData data) {
super(data);
}
@Override
public void addTranslations() {
add(TestItemGroups.GROUP, "UTeamTest Tab");
add(TestItems.BASIC.get(), "Basic Item");
add(TestItems.BASIC.get().getTranslationKey() + ".outofrange", "\u00A74Out of range");
add(TestItems.BETTER_ENDERPEARL.get(), "Better Enderpearl");
add(TestItems.BASIC_FOOD.get(), "Basic Food");
add(TestItems.BASIC_ARMOR.getHelmet().get(), "Basic Helmet");
add(TestItems.BASIC_ARMOR.getChestplate().get(), "Basic Chestplate");
add(TestItems.BASIC_ARMOR.getLeggings().get(), "Basic Leggings");
add(TestItems.BASIC_ARMOR.getBoots().get(), "Basic Boots");
add(TestItems.BASIC_TOOL.getAxe().get(), "Basic Axe");
add(TestItems.BASIC_TOOL.getHoe().get(), "Basic Hoe");
add(TestItems.BASIC_TOOL.getPickaxe().get(), "Basic Pickaxe");
add(TestItems.BASIC_TOOL.getShovel().get(), "Basic Shovel");
add(TestItems.BASIC_TOOL.getSword().get(), "Basic Sword");
add(TestBlocks.BASIC.get(), "Basic Block");
add(TestBlocks.BASIC_TILEENTITY.get(), "Tile Entity Block");
add(TestBlocks.BASIC_ENERGY_CREATOR.get(), "Energy Creator");
add(TestBlocks.BASIC_FLUID_INVENTORY.get(), "Fluid Inventory");
add(TestEntityTypes.BETTER_ENDERPEARL.get(), "Better Enderpearl");
add(TestEnchantments.AUTO_SMELT.get(), "Auto Smelt");
add(TestEffects.RADIATION.get(), "Radiation");
add("death.attack.radiation", "%1$s dies of radiation");
add("item.minecraft.potion.effect.radiation", "Potion of Radiation");
add("item.minecraft.splash_potion.effect.radiation", "Splash Potion of Radiation");
add("item.minecraft.lingering_potion.effect.radiation", "Lingering Potion of Radiation");
add(TestBiomes.BASIC.get(), "Basic");
}
}
|
Use the supplier methods in the test languages provider now
|
src/test/java/info/u_team/u_team_test/data/provider/TestLanguagesProvider.java
|
Use the supplier methods in the test languages provider now
|
|
Java
|
mit
|
3681ad73842f2a57418b6ad00f3b11620e853881
| 0
|
rextrebat/ecloudmanager,AltisourceLabs/ecloudmanager,rextrebat/ecloudmanager,AltisourceLabs/ecloudmanager,AltisourceLabs/ecloudmanager,rextrebat/ecloudmanager
|
agent/inflector/src/main/java/org/ecloudmanager/agent/controller/VMController.java
|
package org.ecloudmanager.agent.controller;
import io.swagger.inflector.models.RequestContext;
import io.swagger.inflector.models.ResponseContext;
import org.ecloudmanager.agent.model.VM;
import org.ecloudmanager.agent.model.VMInfo;
import javax.ws.rs.core.Response.Status;
public class VmController {
public ResponseContext createVM(RequestContext request, VM vm) {
return new ResponseContext().status(Status.ACCEPTED).entity("777");
}
ResponseContext getVM(io.swagger.inflector.models.RequestContext request, String vmId) {
VMInfo info = new VMInfo().status("running");
return new ResponseContext().status(Status.OK).entity(info);
}
}
|
build fix
|
agent/inflector/src/main/java/org/ecloudmanager/agent/controller/VMController.java
|
build fix
|
||
Java
|
mit
|
2003dacf00a039f4c84b5d3a4c5c1fe1017a269d
| 0
|
benasic/dbgen
|
package application.controller;
import application.generator.Generator;
import application.generator.IntegerGenerator;
import javafx.fxml.FXML;
import javafx.scene.control.TextField;
import javafx.util.converter.NumberStringConverter;
import java.text.NumberFormat;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class IntegerGeneratorController {
@FXML
private TextField generatorTextField;
private IntegerGenerator integerGenerator;
@FXML
private void initialize(){
Pattern p = Pattern.compile("\\D");
generatorTextField.textProperty().addListener((observable, oldValue, newValue) ->
{
Matcher m = p.matcher(newValue);
if(m.find()){
generatorTextField.textProperty().setValue(m.replaceAll(""));
}
});
}
public void setGenerator(Generator generator){
integerGenerator = (IntegerGenerator)generator;
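// Bind the text field to the generator's integer property using an integer-only NumberFormat for conversion.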
NumberStringConverter numberStringConverter = new NumberStringConverter(){
@Override
protected NumberFormat getNumberFormat() {
return NumberFormat.getIntegerInstance();
}
};
generatorTextField.textProperty().bindBidirectional(this.integerGenerator.getGeneratorIntegerProperty(), numberStringConverter);
}
}
|
src/application/controller/IntegerGeneratorController.java
|
package application.controller;
import application.generator.Generator;
import application.generator.IntegerGenerator;
import javafx.fxml.FXML;
import javafx.scene.control.TextField;
import javafx.util.StringConverter;
import javafx.util.converter.IntegerStringConverter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class IntegerGeneratorController {
@FXML
private TextField generatorTextField;
private IntegerGenerator integerGenerator;
@FXML
private void initialize(){
Pattern p = Pattern.compile("\\D");
generatorTextField.textProperty().addListener((observable, oldValue, newValue) ->
{
Matcher m = p.matcher(newValue);
System.out.print(oldValue + ' ' + newValue + '\n');
if(m.find()){
System.out.println("Promjena");
generatorTextField.textProperty().setValue(m.replaceAll(""));
}
});
}
public void setGenerator(Generator generator){
integerGenerator = (IntegerGenerator)generator;
generatorTextField.textProperty().bindBidirectional(this.integerGenerator.getGeneratorIntegerProperty(),(StringConverter)new IntegerStringConverter());
}
}
|
Fixed integer parsing in IntegerGenerator
|
src/application/controller/IntegerGeneratorController.java
|
Fixed integer parsing in IntegerGenerator
|
|
Java
|
mit
|
8773bfed848d5c3c843e8a77acc4a6d954139796
| 0
|
vincentzhang96/VahrhedralBot
|
package co.phoenixlab.discord.commands;
import co.phoenixlab.common.lang.SafeNav;
import co.phoenixlab.common.lang.number.ParseInt;
import co.phoenixlab.common.localization.Localizer;
import co.phoenixlab.discord.CommandDispatcher;
import co.phoenixlab.discord.Configuration;
import co.phoenixlab.discord.MessageContext;
import co.phoenixlab.discord.VahrhedralBot;
import co.phoenixlab.discord.api.ApiConst;
import co.phoenixlab.discord.api.DiscordApiClient;
import co.phoenixlab.discord.api.DiscordWebSocketClient;
import co.phoenixlab.discord.api.entities.*;
import co.phoenixlab.discord.api.event.LogInEvent;
import co.phoenixlab.discord.commands.tempstorage.Minific;
import co.phoenixlab.discord.commands.tempstorage.MinificStorage;
import com.google.gson.Gson;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.JsonNode;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import org.apache.http.HttpHeaders;
import org.apache.http.entity.ContentType;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static co.phoenixlab.discord.api.DiscordApiClient.*;
import static co.phoenixlab.discord.api.entities.Permission.CHAT_MANAGE_MESSAGES;
import static co.phoenixlab.discord.api.entities.Permission.GEN_MANAGE_ROLES;
import static co.phoenixlab.discord.commands.CommandUtil.findUser;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
public class Commands {
private final AdminCommands adminCommands;
private final DnCommands dnCommands;
private final ModCommands modCommands;
private final Localizer loc;
private final Random random;
// Temporary until command throttling is implemented
private Instant lastInsultTime;
private MinificStorage minificStorage;
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("MMM dd uuuu");
private static final Path MINIFIC_STORE = Paths.get("config/minific.json");
public Commands(VahrhedralBot bot) {
adminCommands = new AdminCommands(bot);
dnCommands = new DnCommands(bot);
modCommands = new ModCommands(bot);
loc = bot.getLocalizer();
random = new Random();
}
public void register(CommandDispatcher d) {
adminCommands.registerAdminCommands();
dnCommands.registerDnCommands();
modCommands.registerModCommands();
d.registerAlwaysActiveCommand("commands.general.admin", this::admin);
d.registerAlwaysActiveCommand("commands.general.mod", this::mod);
d.registerCommand("commands.general.admins", this::listAdmins);
d.registerCommand("commands.general.info", this::info);
d.registerCommand("commands.general.avatar", this::avatar);
d.registerCommand("commands.general.version", this::version);
d.registerCommand("commands.general.stats", this::stats);
d.registerCommand("commands.general.roles", this::roles);
d.registerCommand("commands.general.rolecolor", this::roleColor);
d.registerCommand("commands.general.sandwich", this::makeSandwich);
d.registerCommand("commands.general.dn", this::dnCommands);
d.registerCommand("commands.general.insult", this::insult);
d.registerCommand("commands.general.minific", this::minific);
}
public AdminCommands getAdminCommands() {
return adminCommands;
}
public DnCommands getDnCommands() {
return dnCommands;
}
public ModCommands getModCommands() {
return modCommands;
}
private void admin(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
// Easter egg
if ("ku".equalsIgnoreCase(args)) {
int number = random.nextInt(9) + 1;
int spotY = random.nextInt(2);
String spotYKey;
String spotXKey;
if (spotY == 0) {
spotYKey = "commands.general.admin.response.easter_egg.center";
} else if (spotY == 1) {
spotYKey = "commands.general.admin.response.easter_egg.top";
} else {
spotYKey = "commands.general.admin.response.easter_egg.bottom";
}
int spotX = random.nextInt(2);
if (spotX == 0) {
spotXKey = "commands.general.admin.response.easter_egg.center";
} else if (spotX == 1) {
spotXKey = "commands.general.admin.response.easter_egg.right";
} else {
spotXKey = "commands.general.admin.response.easter_egg.left";
}
String pos;
if (spotX == 0 && spotY == 0) {
pos = loc.localize("commands.general.admin.response.easter_egg.center");
} else {
pos = loc.localize("commands.general.admin.response.easter_egg.tuple",
loc.localize(spotYKey), loc.localize(spotXKey));
}
apiClient.sendMessage(
loc.localize("commands.general.admin.response.easter_egg.format", number, pos),
context.getChannel());
return;
}
// Permission check
if (!context.getBot().getConfig().isAdmin(message.getAuthor().getId())) {
if (context.getDispatcher().active().get()) {
apiClient.sendMessage(
loc.localize("commands.general.admin.response.reject", message.getAuthor().getUsername()),
context.getChannel());
}
return;
}
if (args.isEmpty()) {
args = "help";
}
adminCommands.getAdminCommandDispatcher().
handleCommand(new Message(message.getAuthor(), message.getChannelId(),
args, message.getId(), message.getMentions(), message.getTimestamp()));
}
private void mod(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
User author = context.getAuthor();
Server server = context.getServer();
if (!checkPermission(CHAT_MANAGE_MESSAGES, apiClient.getUserMember(author, server), server, apiClient)) {
apiClient.sendMessage(loc.localize("commands.general.mod.response.reject", author.getUsername()),
context.getChannel());
return;
}
if (args.isEmpty()) {
args = "help";
}
modCommands.getModCommandDispatcher().
handleCommand(new Message(message.getAuthor(), message.getChannelId(),
args, message.getId(), message.getMentions(), message.getTimestamp()));
}
private void listAdmins(MessageContext context, String s) {
DiscordApiClient apiClient = context.getApiClient();
VahrhedralBot bot = context.getBot();
StringJoiner joiner = new StringJoiner(", ");
bot.getConfig().getAdmins().stream().
map(apiClient::getUserById).
filter(user -> user != null).
map(User::getUsername).
forEach(joiner::add);
String res = joiner.toString();
if (res.isEmpty()) {
res = loc.localize("commands.general.admins.response.none");
}
apiClient.sendMessage(loc.localize("commands.general.admins.response.format", res),
context.getChannel());
}
private void info(MessageContext context, String args) {
Message message = context.getMessage();
Configuration config = context.getBot().getConfig();
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
selfCheck(context, user);
} else {
user = message.getAuthor();
}
if (user == NO_USER) {
context.getApiClient().sendMessage(loc.localize("commands.general.info.response.not_found"),
message.getChannelId());
} else {
String avatar = (user.getAvatar() == null ? loc.localize("commands.general.info.response.no_avatar") :
user.getAvatarUrl().toExternalForm());
String response = loc.localize("commands.general.info.response.format",
user.getUsername(), user.getId(),
config.getBlacklist().contains(user.getId()) ?
loc.localize("commands.general.info.response.blacklisted") : "",
config.getAdmins().contains(user.getId()) ?
loc.localize("commands.general.info.response.admin") : "",
avatar);
context.getApiClient().sendMessage(response, context.getChannel());
}
}
private void avatar(MessageContext context, String args) {
Message message = context.getMessage();
if (args.startsWith(loc.localize("commands.general.avatar.subcommand.server"))) {
handleServerAvatar(context, message, args);
return;
}
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
selfCheck(context, user);
} else {
user = message.getAuthor();
}
if (user == NO_USER) {
context.getApiClient().sendMessage(loc.localize("commands.general.avatar.response.not_found"),
message.getChannelId());
} else {
String avatar = (user.getAvatar() == null ?
loc.localize("commands.general.avatar.response.no_avatar") : user.getAvatarUrl().toExternalForm());
context.getApiClient().sendMessage(loc.localize("commands.general.avatar.response.format",
user.getUsername(), avatar),
context.getChannel());
}
}
private void handleServerAvatar(MessageContext context, Message message, String args) {
Server server;
DiscordApiClient apiClient = context.getApiClient();
Channel c = apiClient.getChannelById(message.getChannelId());
if (args.contains(" ")) {
args = args.split(" ", 2)[1];
server = apiClient.getServerByID(args);
if (server == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.not_member"),
context.getChannel());
}
} else if (c == null || c.getParent() == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.private"),
context.getChannel());
return;
} else {
server = c.getParent();
}
String icon = server.getIcon();
if (icon == null) {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.format.none",
server.getName()),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.format",
server.getName(), String.format(ApiConst.ICON_URL_PATTERN, server.getId(), icon)),
context.getChannel());
}
}
private void version(MessageContext context, String args) {
context.getApiClient().sendMessage(context.getBot().getVersionInfo(), context.getChannel());
}
private void stats(MessageContext context, String s) {
DiscordApiClient apiClient = context.getApiClient();
CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
CommandDispatcher.Statistics mdStats = mainDispatcher.getStatistics();
DiscordWebSocketClient.Statistics wsStats = apiClient.getWebSocketClient().getStatistics();
DiscordApiClient.Statistics apiStats = apiClient.getStatistics();
apiClient.sendMessage(loc.localize("commands.general.stats.response.format",
mdStats.commandHandleTime.summary(),
mdStats.acceptedCommandHandleTime.summary(),
mdStats.commandsReceived.sum(),
mdStats.commandsHandledSuccessfully.sum() + 1, // +1 since this command executed OK but hasn't been counted yet
mdStats.commandsRejected.sum(),
wsStats.avgMessageHandleTime.summary(),
wsStats.messageReceiveCount.sum(),
wsStats.keepAliveCount.sum(),
wsStats.errorCount.sum(),
apiStats.connectAttemptCount.sum(),
apiStats.eventCount.sum(),
apiStats.eventDispatchErrorCount.sum(),
apiStats.restErrorCount.sum()),
context.getChannel());
}
private void roles(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
selfCheck(context, user);
} else {
user = message.getAuthor();
}
if (user == NO_USER) {
context.getApiClient().sendMessage(loc.localize("commands.general.roles.response.not_found"),
message.getChannelId());
} else {
Server server = context.getServer();
if (server == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.roles.response.private"), context.getChannel());
return;
}
Member member = apiClient.getUserMember(user, server);
if (member != NO_MEMBER) {
context.getApiClient().sendMessage(loc.localize("commands.general.roles.response.format",
user.getUsername(), listRoles(member, server, apiClient)),
context.getChannel());
} else {
context.getApiClient().sendMessage(loc.localize("commands.general.roles.response.member_not_found"),
context.getChannel());
}
}
}
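// Builds a localized, comma-separated list of the member's roles, or a "no roles" message if they have none.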
private String listRoles(Member member, Server server, DiscordApiClient client) {
if (member.getRoles().isEmpty()) {
return loc.localize("commands.general.roles.response.no_roles");
}
StringJoiner joiner = new StringJoiner(", ");
member.getRoles().stream().
map(s -> client.getRole(s, server)).
filter(r -> r != NO_ROLE).
map(r -> loc.localize("commands.general.roles.response.role.format",
r.getName(), r.getId(), r.getColor())).
forEach(joiner::add);
return joiner.toString();
}
private void roleColor(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
// Check permissions first
Server server = context.getServer();
if (server == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.private"),
context.getChannel());
return;
}
Member issuer = apiClient.getUserMember(message.getAuthor(), server);
if (!(checkPermission(GEN_MANAGE_ROLES, issuer, server, apiClient) ||
context.getBot().getConfig().isAdmin(message.getAuthor().getId()))) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.no_user_perms"),
context.getChannel());
return;
}
Member bot = apiClient.getUserMember(apiClient.getClientUser(), server);
if (!checkPermission(GEN_MANAGE_ROLES, bot, server, apiClient)) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.no_bot_perms"),
context.getChannel());
return;
}
String[] split = args.split(" ");
if (split.length != 2) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.help_format"),
context.getChannel());
return;
}
String colorStr = split[1];
OptionalInt colorOpt = ParseInt.parseOptional(colorStr);
if (!colorOpt.isPresent()) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.help_format"),
context.getChannel());
return;
}
int color = colorOpt.getAsInt();
String roleId = split[0];
Role role = apiClient.getRole(roleId, server);
if (role == NO_ROLE) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.role_not_found"),
context.getChannel());
return;
}
patchRole(apiClient, message, server, color, role);
}
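// Sends a PATCH to the Discord roles endpoint to change the role's color, preserving its existing hoist, name, and permissions.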
private void patchRole(DiscordApiClient apiClient, Message message, Server server, int color, Role role) {
try {
Map<String, String> headers = new HashMap<>();
headers.put(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.getMimeType());
headers.put(HttpHeaders.AUTHORIZATION, apiClient.getToken());
JSONObject requestBody = new JSONObject();
requestBody.put("color", color);
requestBody.put("hoist", role.isHoist());
requestBody.put("name", role.getName());
requestBody.put("permissions", role.getPermissions());
HttpResponse<JsonNode> response = Unirest.
patch(ApiConst.SERVERS_ENDPOINT + server.getId() + "/roles/" + role.getId()).
headers(headers).
body(new JsonNode(requestBody.toString())).
asJson();
if (response.getStatus() != 200) {
VahrhedralBot.LOGGER.warn("Unable to PATCH role: HTTP {}: {}",
response.getStatus(), response.getStatusText());
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.general_error"),
message.getChannelId());
return;
}
JsonNode body = response.getBody();
JSONObject obj = body.getObject();
if (obj.getInt("color") != color) {
VahrhedralBot.LOGGER.warn("Unable to PATCH role: Returned color does not match",
response.getStatus(), response.getStatusText());
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.general_error"),
message.getChannelId());
return;
}
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response",
role.getName(), role.getId(), color),
message.getChannelId());
} catch (UnirestException | JSONException e) {
VahrhedralBot.LOGGER.warn("Unable to PATCH role", e);
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.general_error"),
message.getChannelId());
}
}
private void makeSandwich(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
if (loc.localize("commands.general.sandwich.magic_word").equalsIgnoreCase(args) ||
new Random().nextBoolean()) {
apiClient.sendMessage(loc.localize("commands.general.sandwich.response.deny"),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.sandwich.response.magic"),
context.getChannel());
}
}
private void dnCommands(MessageContext context, String args) {
Message message = context.getMessage();
if (args.isEmpty()) {
args = "help";
}
dnCommands.getDispatcher().
handleCommand(new Message(message.getAuthor(), message.getChannelId(),
args, message.getId(), message.getMentions(), message.getTimestamp()));
}
private void insult(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
boolean isNotAdmin = !context.getBot().getConfig().isAdmin(message.getAuthor().getId());
if (isNotAdmin) {
if (lastInsultTime != null) {
Instant now = Instant.now();
if (now.toEpochMilli() - lastInsultTime.toEpochMilli() < TimeUnit.MINUTES.toMillis(1)) {
apiClient.sendMessage(loc.localize("commands.general.insult.response.timeout"),
message.getChannelId());
return;
}
}
}
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
} else {
apiClient.sendMessage(loc.localize("commands.general.insult.response.missing"),
context.getChannel());
return;
}
if (user == NO_USER) {
apiClient.sendMessage(loc.localize("commands.general.insult.response.not_found"),
context.getChannel());
return;
}
if (context.getBot().getConfig().isAdmin(user.getId())) {
user = context.getAuthor();
}
String insult = getInsult();
if (insult == null) {
apiClient.sendMessage(loc.localize("commands.general.insult.response.error"),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.insult.response.format",
user.getUsername(), insult),
context.getChannel(), new String[]{user.getId()});
if (isNotAdmin) {
lastInsultTime = Instant.now();
}
}
}
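// Fetches a random insult from the quandyfactory.com JSON API; returns null if the request or parsing fails.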
private String getInsult() {
try {
HttpResponse<JsonNode> response = Unirest.get("http://quandyfactory.com/insult/json").
asJson();
if (response.getStatus() != 200) {
VahrhedralBot.LOGGER.warn("Unable to load insult, HTTP {}: {}",
response.getStatus(), response.getStatusText());
return null;
}
JsonNode node = response.getBody();
return SafeNav.of(node).
next(JsonNode::getObject).
next(o -> o.getString("insult")).
get();
} catch (UnirestException e) {
VahrhedralBot.LOGGER.warn("Unable to load insult", e);
return null;
}
}
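// The server owner implicitly has every permission; otherwise at least one of the member's roles must grant it.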
private boolean checkPermission(Permission permission, Member member, Server server, DiscordApiClient apiClient) {
if (member.getUser().getId().equals(server.getOwnerId())) {
return true;
}
for (String roleId : member.getRoles()) {
Role role = apiClient.getRole(roleId, server);
if (permission.test(role.getPermissions())) {
return true;
}
}
return false;
}
private void selfCheck(MessageContext context, User user) {
if (context.getMessage().getAuthor().equals(user)) {
context.getApiClient().sendMessage(loc.localize("commands.common.self_reference", user.getUsername()),
context.getChannel());
}
}
private void minific(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
String authorId = context.getAuthor().getId();
if (args.startsWith("!#/")) {
if (!context.getBot().getConfig().isAdmin(authorId)) {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.reject"),
context.getChannel());
return;
}
manageMinific(args, apiClient, context.getChannel());
} else if (args.isEmpty()) {
Minific fic = getRandomMinific();
if (fic == null) {
apiClient.sendMessage(loc.localize("commands.general.minific.response.none"),
context.getChannel());
} else {
User user = apiClient.getUserById(fic.getAuthorId());
apiClient.sendMessage(loc.localize("commands.general.minific.response.random",
fic.getId(), user.getUsername(), fic.getDate(), fic.getContent()),
context.getChannel());
}
} else {
if (minificStorage.getAuthorizedAuthorUids().contains(authorId) ||
context.getBot().getConfig().isAdmin(authorId)) {
Minific fic = addMinific(args, authorId);
apiClient.sendMessage(loc.localize("commands.general.minific.response.added",
fic.getId()),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.minific.response.reject"),
context.getChannel());
}
}
}
private void manageMinific(String args, DiscordApiClient apiClient, Channel ctxChannel) {
String[] split = args.split(" ", 2);
String cmd = split[0].substring(3).toLowerCase();
switch (cmd) {
case "delete":
if (split.length == 2) {
deleteMinificCmd(apiClient, ctxChannel, split[1]);
} else {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.error"),
ctxChannel);
}
return;
case "setauthor":
if (split.length == 2) {
if (setMinificAuthorCmd(apiClient, ctxChannel, split[1])) {
return;
}
}
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.error"),
ctxChannel);
return;
case "list":
listMinificsCmd(apiClient, ctxChannel);
return;
}
}
private void listMinificsCmd(DiscordApiClient apiClient, Channel ctxChannel) {
StringJoiner joiner = new StringJoiner("\n");
for (Minific minific : minificStorage.getMinifics()) {
String content = minific.getContent();
String excerpt = content.substring(0, Math.min(content.length(), 30)).
replace("\n", " ");
joiner.add(loc.localize("commands.general.minific.response.manage.list.entry",
minific.getId(),
apiClient.getUserById(minific.getAuthorId()).getUsername(),
minific.getDate(),
excerpt));
}
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.list",
minificStorage.getMinifics().size(),
joiner.toString()),
ctxChannel);
}
private boolean setMinificAuthorCmd(DiscordApiClient apiClient, Channel ctxChannel, String s) {
String[] ss = s.split(" ", 2);
if (ss.length == 2) {
String id = ss[0];
String authorId = ss[1];
for (Minific minific : minificStorage.getMinifics()) {
if (minific.getId().equals(id)) {
minific.setAuthorId(authorId);
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.setauthor",
id, authorId),
ctxChannel);
return true;
}
}
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.not_found",
id),
ctxChannel);
return true;
}
return false;
}
private void deleteMinificCmd(DiscordApiClient apiClient, Channel ctxChannel, String id) {
if (deleteMinific(id)) {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.delete",
id),
ctxChannel);
} else {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.not_found",
id),
ctxChannel);
}
}
private boolean deleteMinific(String id) {
boolean deleted = false;
List<Minific> minifics = minificStorage.getMinifics();
for (Iterator<Minific> iter = minifics.iterator(); iter.hasNext(); ) {
Minific minific = iter.next();
if (minific.getId().equals(id)) {
iter.remove();
deleted = true;
break;
}
}
if (deleted) {
// Re-ID fics
List<Minific> copy = new ArrayList<>(minifics);
minifics.clear();
for (int i = 0; i < copy.size(); i++) {
Minific minific = copy.get(i);
minific = new Minific(Integer.toString(i), minific.getAuthorId(), minific.getDate(), minific.getContent());
minifics.add(minific);
}
}
return deleted;
}
private Minific getRandomMinific() {
int size = minificStorage.getMinifics().size();
if (size == 0) {
return null;
}
return minificStorage.getMinifics().get(random.nextInt(size));
}
private Minific addMinific(String content, String authorId) {
ZonedDateTime now = ZonedDateTime.now();
Minific minific = new Minific(Integer.toString(minificStorage.getMinifics().size()),
authorId, DATE_FORMATTER.format(now), content);
minificStorage.getMinifics().add(minific);
saveMinificStorage();
return minific;
}
private void saveMinificStorage() {
Gson gson = new Gson();
try (BufferedWriter writer = Files.newBufferedWriter(MINIFIC_STORE, UTF_8, CREATE, TRUNCATE_EXISTING)) {
gson.toJson(minificStorage, writer);
writer.flush();
VahrhedralBot.LOGGER.info("Saved minific store");
} catch (IOException e) {
VahrhedralBot.LOGGER.warn("Unable to save minific store", e);
}
}
private void loadMinificStorage() {
Gson gson = new Gson();
if (!Files.exists(MINIFIC_STORE)) {
minificStorage = new MinificStorage();
saveMinificStorage();
}
try (Reader reader = Files.newBufferedReader(MINIFIC_STORE, UTF_8)) {
minificStorage = gson.fromJson(reader, MinificStorage.class);
VahrhedralBot.LOGGER.info("Loaded minific store");
} catch (IOException e) {
VahrhedralBot.LOGGER.warn("Unable to load minific store", e);
}
}
public void onLogIn(LogInEvent logInEvent) {
loadMinificStorage();
modCommands.onReady();
}
}
|
src/main/java/co/phoenixlab/discord/commands/Commands.java
|
package co.phoenixlab.discord.commands;
import co.phoenixlab.common.lang.SafeNav;
import co.phoenixlab.common.lang.number.ParseInt;
import co.phoenixlab.common.localization.Localizer;
import co.phoenixlab.discord.CommandDispatcher;
import co.phoenixlab.discord.Configuration;
import co.phoenixlab.discord.MessageContext;
import co.phoenixlab.discord.VahrhedralBot;
import co.phoenixlab.discord.api.ApiConst;
import co.phoenixlab.discord.api.DiscordApiClient;
import co.phoenixlab.discord.api.DiscordWebSocketClient;
import co.phoenixlab.discord.api.entities.*;
import co.phoenixlab.discord.api.event.LogInEvent;
import co.phoenixlab.discord.commands.tempstorage.Minific;
import co.phoenixlab.discord.commands.tempstorage.MinificStorage;
import com.google.gson.Gson;
import com.mashape.unirest.http.HttpResponse;
import com.mashape.unirest.http.JsonNode;
import com.mashape.unirest.http.Unirest;
import com.mashape.unirest.http.exceptions.UnirestException;
import org.apache.http.HttpHeaders;
import org.apache.http.entity.ContentType;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static co.phoenixlab.discord.api.DiscordApiClient.*;
import static co.phoenixlab.discord.api.entities.Permission.CHAT_MANAGE_MESSAGES;
import static co.phoenixlab.discord.api.entities.Permission.GEN_MANAGE_ROLES;
import static co.phoenixlab.discord.commands.CommandUtil.findUser;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING;
public class Commands {
private final AdminCommands adminCommands;
private final DnCommands dnCommands;
private final ModCommands modCommands;
private final Localizer loc;
private final Random random;
// Temporary until command throttling is implemented
private Instant lastInsultTime;
private MinificStorage minificStorage;
private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("MMM dd uuuu");
private static final Path MINIFIC_STORE = Paths.get("config/minific.json");
public Commands(VahrhedralBot bot) {
adminCommands = new AdminCommands(bot);
dnCommands = new DnCommands(bot);
modCommands = new ModCommands(bot);
loc = bot.getLocalizer();
random = new Random();
}
public void register(CommandDispatcher d) {
adminCommands.registerAdminCommands();
dnCommands.registerDnCommands();
modCommands.registerModCommands();
d.registerAlwaysActiveCommand("commands.general.admin", this::admin);
d.registerAlwaysActiveCommand("commands.general.mod", this::mod);
d.registerCommand("commands.general.admins", this::listAdmins);
d.registerCommand("commands.general.info", this::info);
d.registerCommand("commands.general.avatar", this::avatar);
d.registerCommand("commands.general.version", this::version);
d.registerCommand("commands.general.stats", this::stats);
d.registerCommand("commands.general.roles", this::roles);
d.registerCommand("commands.general.rolecolor", this::roleColor);
d.registerCommand("commands.general.sandwich", this::makeSandwich);
d.registerCommand("commands.general.dn", this::dnCommands);
d.registerCommand("commands.general.insult", this::insult);
d.registerCommand("commands.general.minific", this::minific);
}
public AdminCommands getAdminCommands() {
return adminCommands;
}
public DnCommands getDnCommands() {
return dnCommands;
}
public ModCommands getModCommands() {
return modCommands;
}
private void admin(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
// Easter egg
if ("ku".equalsIgnoreCase(args)) {
int number = random.nextInt(9) + 1;
int spotY = random.nextInt(2);
String spotYKey;
String spotXKey;
if (spotY == 0) {
spotYKey = "commands.general.admin.response.easter_egg.center";
} else if (spotY == 1) {
spotYKey = "commands.general.admin.response.easter_egg.top";
} else {
spotYKey = "commands.general.admin.response.easter_egg.bottom";
}
int spotX = random.nextInt(2);
if (spotX == 0) {
spotXKey = "commands.general.admin.response.easter_egg.center";
} else if (spotX == 1) {
spotXKey = "commands.general.admin.response.easter_egg.right";
} else {
spotXKey = "commands.general.admin.response.easter_egg.left";
}
String pos;
if (spotX == 0 && spotY == 0) {
pos = loc.localize("commands.general.admin.response.easter_egg.center");
} else {
pos = loc.localize("commands.general.admin.response.easter_egg.tuple",
loc.localize(spotYKey), loc.localize(spotXKey));
}
apiClient.sendMessage(
loc.localize("commands.general.admin.response.easter_egg.format", number, pos),
context.getChannel());
return;
}
// Permission check
if (!context.getBot().getConfig().isAdmin(message.getAuthor().getId())) {
if (context.getDispatcher().active().get()) {
apiClient.sendMessage(
loc.localize("commands.general.admin.response.reject", message.getAuthor().getUsername()),
context.getChannel());
}
return;
}
if (args.isEmpty()) {
args = "help";
}
adminCommands.getAdminCommandDispatcher().
handleCommand(new Message(message.getAuthor(), message.getChannelId(),
args, message.getId(), message.getMentions(), message.getTimestamp()));
}
private void mod(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
User author = context.getAuthor();
Server server = context.getServer();
if (!checkPermission(CHAT_MANAGE_MESSAGES, apiClient.getUserMember(author, server), server, apiClient)) {
apiClient.sendMessage(loc.localize("commands.general.mod.response.reject", author.getUsername()),
context.getChannel());
return;
}
if (args.isEmpty()) {
args = "help";
}
modCommands.getModCommandDispatcher().
handleCommand(new Message(message.getAuthor(), message.getChannelId(),
args, message.getId(), message.getMentions(), message.getTimestamp()));
}
private void listAdmins(MessageContext context, String s) {
DiscordApiClient apiClient = context.getApiClient();
VahrhedralBot bot = context.getBot();
StringJoiner joiner = new StringJoiner(", ");
bot.getConfig().getAdmins().stream().
map(apiClient::getUserById).
filter(user -> user != null).
map(User::getUsername).
forEach(joiner::add);
String res = joiner.toString();
if (res.isEmpty()) {
res = loc.localize("commands.general.admins.response.none");
}
apiClient.sendMessage(loc.localize("commands.general.admins.response.format", res),
context.getChannel());
}
private void info(MessageContext context, String args) {
Message message = context.getMessage();
Configuration config = context.getBot().getConfig();
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
selfCheck(context, user);
} else {
user = message.getAuthor();
}
if (user == NO_USER) {
context.getApiClient().sendMessage(loc.localize("commands.general.info.response.not_found"),
message.getChannelId());
} else {
String avatar = (user.getAvatar() == null ? loc.localize("commands.general.info.response.no_avatar") :
user.getAvatarUrl().toExternalForm());
String response = loc.localize("commands.general.info.response.format",
user.getUsername(), user.getId(),
config.getBlacklist().contains(user.getId()) ?
loc.localize("commands.general.info.response.blacklisted") : "",
config.getAdmins().contains(user.getId()) ?
loc.localize("commands.general.info.response.admin") : "",
avatar);
context.getApiClient().sendMessage(response, context.getChannel());
}
}
private void avatar(MessageContext context, String args) {
Message message = context.getMessage();
if (args.startsWith(loc.localize("commands.general.avatar.subcommand.server"))) {
handleServerAvatar(context, message, args);
return;
}
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
selfCheck(context, user);
} else {
user = message.getAuthor();
}
if (user == NO_USER) {
context.getApiClient().sendMessage(loc.localize("commands.general.avatar.response.not_found"),
message.getChannelId());
} else {
String avatar = (user.getAvatar() == null ?
loc.localize("commands.general.avatar.response.no_avatar") : user.getAvatarUrl().toExternalForm());
context.getApiClient().sendMessage(loc.localize("commands.general.avatar.response.format",
user.getUsername(), avatar),
context.getChannel());
}
}
private void handleServerAvatar(MessageContext context, Message message, String args) {
Server server;
DiscordApiClient apiClient = context.getApiClient();
Channel c = apiClient.getChannelById(message.getChannelId());
if (args.contains(" ")) {
args = args.split(" ", 2)[1];
server = apiClient.getServerByID(args);
if (server == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.not_member"),
context.getChannel());
}
} else if (c == null || c.getParent() == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.private"),
context.getChannel());
return;
} else {
server = c.getParent();
}
String icon = server.getIcon();
if (icon == null) {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.format.none",
server.getName()),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.avatar.response.server.format",
server.getName(), String.format(ApiConst.ICON_URL_PATTERN, server.getId(), icon)),
context.getChannel());
}
}
private void version(MessageContext context, String args) {
context.getApiClient().sendMessage(context.getBot().getVersionInfo(), context.getChannel());
}
private void stats(MessageContext context, String s) {
DiscordApiClient apiClient = context.getApiClient();
CommandDispatcher mainDispatcher = context.getBot().getMainCommandDispatcher();
CommandDispatcher.Statistics mdStats = mainDispatcher.getStatistics();
DiscordWebSocketClient.Statistics wsStats = apiClient.getWebSocketClient().getStatistics();
DiscordApiClient.Statistics apiStats = apiClient.getStatistics();
apiClient.sendMessage(loc.localize("commands.general.stats.response.format",
mdStats.commandHandleTime.summary(),
mdStats.acceptedCommandHandleTime.summary(),
mdStats.commandsReceived.sum(),
mdStats.commandsHandledSuccessfully.sum() + 1, // +1 since this executed OK but hasn't been counted yet
mdStats.commandsRejected.sum(),
wsStats.avgMessageHandleTime.summary(),
wsStats.messageReceiveCount.sum(),
wsStats.keepAliveCount.sum(),
wsStats.errorCount.sum(),
apiStats.connectAttemptCount.sum(),
apiStats.eventCount.sum(),
apiStats.eventDispatchErrorCount.sum(),
apiStats.restErrorCount.sum()),
context.getChannel());
}
private void roles(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
selfCheck(context, user);
} else {
user = message.getAuthor();
}
if (user == NO_USER) {
context.getApiClient().sendMessage(loc.localize("commands.general.roles.response.not_found"),
message.getChannelId());
} else {
Server server = context.getServer();
if (server == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.roles.response.private"), context.getChannel());
return;
}
Member member = apiClient.getUserMember(user, server);
if (member != NO_MEMBER) {
context.getApiClient().sendMessage(loc.localize("commands.general.roles.response.format",
user.getUsername(), listRoles(member, server, apiClient)),
context.getChannel());
} else {
context.getApiClient().sendMessage(loc.localize("commands.general.roles.response.member_not_found"),
context.getChannel());
}
}
}
private String listRoles(Member member, Server server, DiscordApiClient client) {
if (member.getRoles().isEmpty()) {
return loc.localize("commands.general.roles.response.no_roles");
}
StringJoiner joiner = new StringJoiner(", ");
member.getRoles().stream().
map(s -> client.getRole(s, server)).
filter(r -> r != NO_ROLE).
map(r -> loc.localize("commands.general.roles.response.role.format",
r.getName(), r.getId(), r.getColor())).
forEach(joiner::add);
return joiner.toString();
}
private void roleColor(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
// Check permissions first
Server server = context.getServer();
if (server == NO_SERVER) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.private"),
context.getChannel());
return;
}
Member issuer = apiClient.getUserMember(message.getAuthor(), server);
if (!(checkPermission(GEN_MANAGE_ROLES, issuer, server, apiClient) ||
context.getBot().getConfig().isAdmin(message.getAuthor().getId()))) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.no_user_perms"),
context.getChannel());
return;
}
Member bot = apiClient.getUserMember(apiClient.getClientUser(), server);
if (!checkPermission(GEN_MANAGE_ROLES, bot, server, apiClient)) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.no_bot_perms"),
context.getChannel());
return;
}
String[] split = args.split(" ");
if (split.length != 2) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.help_format"),
context.getChannel());
return;
}
String colorStr = split[1];
OptionalInt colorOpt = ParseInt.parseOptional(colorStr);
if (!colorOpt.isPresent()) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.help_format"),
context.getChannel());
return;
}
int color = colorOpt.getAsInt();
String roleId = split[0];
Role role = apiClient.getRole(roleId, server);
if (role == NO_ROLE) {
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.role_not_found"),
context.getChannel());
return;
}
patchRole(apiClient, message, server, color, role);
}
private void patchRole(DiscordApiClient apiClient, Message message, Server server, int color, Role role) {
try {
Map<String, String> headers = new HashMap<>();
headers.put(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.getMimeType());
headers.put(HttpHeaders.AUTHORIZATION, apiClient.getToken());
JSONObject requestBody = new JSONObject();
requestBody.put("color", color);
requestBody.put("hoist", role.isHoist());
requestBody.put("name", role.getName());
requestBody.put("permissions", role.getPermissions());
HttpResponse<JsonNode> response = Unirest.
patch(ApiConst.SERVERS_ENDPOINT + server.getId() + "/roles/" + role.getId()).
headers(headers).
body(new JsonNode(requestBody.toString())).
asJson();
if (response.getStatus() != 200) {
VahrhedralBot.LOGGER.warn("Unable to PATCH role: HTTP {}: {}",
response.getStatus(), response.getStatusText());
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.general_error"),
message.getChannelId());
return;
}
JsonNode body = response.getBody();
JSONObject obj = body.getObject();
if (obj.getInt("color") != color) {
VahrhedralBot.LOGGER.warn("Unable to PATCH role: Returned color does not match",
response.getStatus(), response.getStatusText());
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.general_error"),
message.getChannelId());
return;
}
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response",
role.getName(), role.getId(), color),
message.getChannelId());
} catch (UnirestException | JSONException e) {
VahrhedralBot.LOGGER.warn("Unable to PATCH role", e);
apiClient.sendMessage(loc.localize("commands.general.rolecolor.response.general_error"),
message.getChannelId());
}
}
private void makeSandwich(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
if (loc.localize("commands.general.sandwich.magic_word").equalsIgnoreCase(args) ||
new Random().nextBoolean()) {
apiClient.sendMessage(loc.localize("commands.general.sandwich.response.deny"),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.sandwich.response.magic"),
context.getChannel());
}
}
private void dnCommands(MessageContext context, String args) {
Message message = context.getMessage();
if (args.isEmpty()) {
args = "help";
}
dnCommands.getDispatcher().
handleCommand(new Message(message.getAuthor(), message.getChannelId(),
args, message.getId(), message.getMentions(), message.getTimestamp()));
}
private void insult(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
Message message = context.getMessage();
boolean isNotAdmin = !context.getBot().getConfig().isAdmin(message.getAuthor().getId());
if (isNotAdmin) {
if (lastInsultTime != null) {
Instant now = Instant.now();
if (now.toEpochMilli() - lastInsultTime.toEpochMilli() < TimeUnit.MINUTES.toMillis(1)) {
apiClient.sendMessage(loc.localize("commands.general.insult.response.timeout"),
message.getChannelId());
return;
}
}
}
User user;
if (!args.isEmpty()) {
user = findUser(context, args);
} else {
apiClient.sendMessage(loc.localize("commands.general.insult.response.missing"),
context.getChannel());
return;
}
if (user == NO_USER) {
apiClient.sendMessage(loc.localize("commands.general.insult.response.not_found"),
context.getChannel());
return;
}
if (context.getBot().getConfig().isAdmin(user.getId())) {
user = context.getAuthor();
}
String insult = getInsult();
if (insult == null) {
apiClient.sendMessage(loc.localize("commands.general.insult.response.error"),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.insult.response.format",
user.getUsername(), insult),
context.getChannel(), new String[]{user.getId()});
if (isNotAdmin) {
lastInsultTime = Instant.now();
}
}
}
private String getInsult() {
try {
HttpResponse<JsonNode> response = Unirest.get("http://quandyfactory.com/insult/json").
asJson();
if (response.getStatus() != 200) {
VahrhedralBot.LOGGER.warn("Unable to load insult, HTTP {}: {}",
response.getStatus(), response.getStatusText());
return null;
}
JsonNode node = response.getBody();
return SafeNav.of(node).
next(JsonNode::getObject).
next(o -> o.getString("insult")).
get();
} catch (UnirestException e) {
VahrhedralBot.LOGGER.warn("Unable to load insult", e);
return null;
}
}
private boolean checkPermission(Permission permission, Member member, Server server, DiscordApiClient apiClient) {
if (member.getUser().getId().equals(server.getOwnerId())) {
return true;
}
for (String roleId : member.getRoles()) {
Role role = apiClient.getRole(roleId, server);
if (permission.test(role.getPermissions())) {
return true;
}
}
return false;
}
private void selfCheck(MessageContext context, User user) {
if (context.getMessage().getAuthor().equals(user)) {
context.getApiClient().sendMessage(loc.localize("commands.common.self_reference", user.getUsername()),
context.getChannel());
}
}
private void minific(MessageContext context, String args) {
DiscordApiClient apiClient = context.getApiClient();
String authorId = context.getAuthor().getId();
if (args.startsWith("!#/")) {
if (!context.getBot().getConfig().isAdmin(authorId)) {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.reject"),
context.getChannel());
return;
}
manageMinific(args, apiClient, context.getChannel());
} else if (args.isEmpty()) {
Minific fic = getRandomMinific();
if (fic == null) {
apiClient.sendMessage(loc.localize("commands.general.minific.response.none"),
context.getChannel());
} else {
User user = apiClient.getUserById(fic.getAuthorId());
apiClient.sendMessage(loc.localize("commands.general.minific.response.random",
fic.getId(), user.getUsername(), fic.getDate(), fic.getContent()),
context.getChannel());
}
} else {
if (minificStorage.getAuthorizedAuthorUids().contains(authorId) ||
context.getBot().getConfig().isAdmin(authorId)) {
Minific fic = addMinific(args, authorId);
apiClient.sendMessage(loc.localize("commands.general.minific.response.added",
fic.getId()),
context.getChannel());
} else {
apiClient.sendMessage(loc.localize("commands.general.minific.response.reject"),
context.getChannel());
}
}
}
private void manageMinific(String args, DiscordApiClient apiClient, Channel ctxChannel) {
String[] split = args.split(" ", 2);
String cmd = split[0].substring(3).toLowerCase();
switch (cmd) {
case "delete":
if (split.length == 2) {
deleteMinificCmd(apiClient, ctxChannel, split[1]);
} else {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.error"),
ctxChannel);
}
return;
case "setauthor":
if (split.length == 2) {
if (setMinificAuthorCmd(apiClient, ctxChannel, split[1])) {
return;
}
}
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.error"),
ctxChannel);
return;
case "list":
listMinificsCmd(apiClient, ctxChannel);
return;
}
}
private void listMinificsCmd(DiscordApiClient apiClient, Channel ctxChannel) {
StringJoiner joiner = new StringJoiner("\n");
for (Minific minific : minificStorage.getMinifics()) {
String content = minific.getContent();
String excerpt = content.substring(0, Math.min(content.length(), 30)).
replace("\n", " ");
joiner.add(loc.localize("commands.general.minific.response.manage.list.entry",
minific.getId(),
apiClient.getUserById(minific.getAuthorId()),
minific.getDate(),
excerpt));
}
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.list",
minificStorage.getMinifics().size(),
joiner.toString()),
ctxChannel);
}
private boolean setMinificAuthorCmd(DiscordApiClient apiClient, Channel ctxChannel, String s) {
String[] ss = s.split(" ", 2);
if (ss.length == 2) {
String id = ss[0];
String authorId = ss[1];
for (Minific minific : minificStorage.getMinifics()) {
if (minific.getId().equals(id)) {
minific.setAuthorId(authorId);
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.setauthor",
id, authorId),
ctxChannel);
return true;
}
}
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.not_found",
id),
ctxChannel);
return true;
}
return false;
}
private void deleteMinificCmd(DiscordApiClient apiClient, Channel ctxChannel, String id) {
if (deleteMinific(id)) {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.delete",
id),
ctxChannel);
} else {
apiClient.sendMessage(loc.localize("commands.general.minific.response.manage.not_found",
id),
ctxChannel);
}
}
private boolean deleteMinific(String id) {
boolean deleted = false;
List<Minific> minifics = minificStorage.getMinifics();
for (Iterator<Minific> iter = minifics.iterator(); iter.hasNext(); ) {
Minific minific = iter.next();
if (minific.getId().equals(id)) {
iter.remove();
deleted = true;
break;
}
}
if (deleted) {
// Re-ID fics
List<Minific> copy = new ArrayList<>(minifics);
minifics.clear();
for (int i = 0; i < copy.size(); i++) {
Minific minific = copy.get(i);
minific = new Minific(Integer.toString(i), minific.getAuthorId(), minific.getDate(), minific.getContent());
minifics.add(minific);
}
}
return deleted;
}
private Minific getRandomMinific() {
int size = minificStorage.getMinifics().size();
if (size == 0) {
return null;
}
return minificStorage.getMinifics().get(random.nextInt(size));
}
private Minific addMinific(String content, String authorId) {
ZonedDateTime now = ZonedDateTime.now();
Minific minific = new Minific(Integer.toString(minificStorage.getMinifics().size()),
authorId, DATE_FORMATTER.format(now), content);
minificStorage.getMinifics().add(minific);
saveMinificStorage();
return minific;
}
private void saveMinificStorage() {
Gson gson = new Gson();
try (BufferedWriter writer = Files.newBufferedWriter(MINIFIC_STORE, UTF_8, CREATE, TRUNCATE_EXISTING)) {
gson.toJson(minificStorage, writer);
writer.flush();
VahrhedralBot.LOGGER.info("Saved minific store");
} catch (IOException e) {
VahrhedralBot.LOGGER.warn("Unable to save minific store", e);
}
}
private void loadMinificStorage() {
Gson gson = new Gson();
if (!Files.exists(MINIFIC_STORE)) {
minificStorage = new MinificStorage();
saveMinificStorage();
}
try (Reader reader = Files.newBufferedReader(MINIFIC_STORE, UTF_8)) {
minificStorage = gson.fromJson(reader, MinificStorage.class);
VahrhedralBot.LOGGER.info("Loaded minific store");
} catch (IOException e) {
VahrhedralBot.LOGGER.warn("Unable to load minific store", e);
}
}
public void onLogIn(LogInEvent logInEvent) {
loadMinificStorage();
modCommands.onReady();
}
}
|
Display username, not obj
|
src/main/java/co/phoenixlab/discord/commands/Commands.java
|
Display username, not obj
|
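The commit above ("Display username, not obj") swaps a raw User object for its username when building the minific list entry, so the rendered text shows a name instead of the object's toString(). A minimal, self-contained sketch of that pattern follows; java.text.MessageFormat and the User stand-in below are assumptions for illustration only, not the bot's real Localizer or entity classes.
// Sketch only: illustrates formatting with getUsername() instead of the object itself.
import java.text.MessageFormat;
public class UsernameFormatSketch {
    // Hypothetical stand-in for the real User entity.
    static final class User {
        private final String username;
        User(String username) { this.username = username; }
        String getUsername() { return username; }
        @Override public String toString() { return "User@" + Integer.toHexString(hashCode()); }
    }
    public static void main(String[] args) {
        User author = new User("Vahrhedral");
        String pattern = "{0} wrote a fic on {1}";
        // Before: the User object is formatted directly and renders via toString().
        String before = MessageFormat.format(pattern, author, "Jan 01 2016");
        // After: pass the username explicitly, as the commit above does.
        String after = MessageFormat.format(pattern, author.getUsername(), "Jan 01 2016");
        System.out.println(before);
        System.out.println(after);
    }
}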
|
Java
|
mit
|
452956cb52fc01be3a853c5a7dad572fae862313
| 0
|
BjoernPetersen/JMusicBot
|
package com.github.bjoernpetersen.jmusicbot;
import com.github.bjoernpetersen.jmusicbot.config.Config;
import com.github.bjoernpetersen.jmusicbot.config.Config.Entry;
import com.github.bjoernpetersen.jmusicbot.platform.Platform;
import com.github.bjoernpetersen.jmusicbot.platform.Support;
import com.github.bjoernpetersen.jmusicbot.playback.PlaybackFactory;
import com.github.bjoernpetersen.jmusicbot.provider.NoSuchSongException;
import com.github.bjoernpetersen.jmusicbot.provider.Provider;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
public class TestProvider implements Provider {
@Nonnull
@Override
public Support getSupport(@Nonnull Platform platform) {
return Support.YES;
}
@Nonnull
@Override
public Class<? extends Provider> getBaseClass() {
return TestProvider.class;
}
@Nonnull
@Override
public String getId() {
return "testprovider";
}
@Nonnull
@Override
public String getReadableName() {
return "TestProvider";
}
@Override
public Set<Class<? extends PlaybackFactory>> getPlaybackDependencies() {
return Collections.emptySet();
}
@Override
public void initialize(@Nonnull InitStateWriter initStateWriter,
@Nonnull PlaybackFactoryManager manager) throws InitializationException {
}
@Nonnull
@Override
public List<Song> search(@Nonnull String query) {
return Collections.emptyList();
}
@Nonnull
@Override
public Song lookup(@Nonnull String id) throws NoSuchSongException {
throw new NoSuchSongException();
}
@Nonnull
@Override
public List<? extends Entry> initializeConfigEntries(@Nonnull Config config) {
return Collections.emptyList();
}
@Override
public void destructConfigEntries() {
}
@Override
public void close() throws IOException {
}
}
|
src/test/java/com/github/bjoernpetersen/jmusicbot/TestProvider.java
|
package com.github.bjoernpetersen.jmusicbot;
import com.github.bjoernpetersen.jmusicbot.config.Config;
import com.github.bjoernpetersen.jmusicbot.config.Config.Entry;
import com.github.bjoernpetersen.jmusicbot.platform.Platform;
import com.github.bjoernpetersen.jmusicbot.platform.Support;
import com.github.bjoernpetersen.jmusicbot.playback.PlaybackFactory;
import com.github.bjoernpetersen.jmusicbot.provider.NoSuchSongException;
import com.github.bjoernpetersen.jmusicbot.provider.Provider;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.annotation.Nonnull;
public class TestProvider implements Provider {
@Nonnull
@Override
public Support getSupport(@Nonnull Platform platform) {
return Support.YES;
}
@Nonnull
@Override
public String getId() {
return "testprovider";
}
@Nonnull
@Override
public String getReadableName() {
return "TestProvider";
}
@Override
public Set<Class<? extends PlaybackFactory>> getPlaybackDependencies() {
return Collections.emptySet();
}
@Override
public void initialize(@Nonnull InitStateWriter initStateWriter,
@Nonnull PlaybackFactoryManager manager) throws InitializationException {
}
@Nonnull
@Override
public List<Song> search(@Nonnull String query) {
return Collections.emptyList();
}
@Nonnull
@Override
public Song lookup(@Nonnull String id) throws NoSuchSongException {
throw new NoSuchSongException();
}
@Nonnull
@Override
public List<? extends Entry> initializeConfigEntries(@Nonnull Config config) {
return Collections.emptyList();
}
@Override
public void destructConfigEntries() {
}
@Override
public void close() throws IOException {
}
}
|
Fix TestProvider
|
src/test/java/com/github/bjoernpetersen/jmusicbot/TestProvider.java
|
Fix TestProvider
|
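The "Fix TestProvider" record above adds a getBaseClass() override so the test stub keeps satisfying the Provider interface. A hedged, self-contained sketch of that pattern is below; the Plugin interface here is a hypothetical stand-in for illustration, not the real JMusicBot Provider API.
// Sketch only: a stub implementing a newly required getBaseClass()-style method.
public class BaseClassSketch {
    interface Plugin {
        String getId();
        Class<? extends Plugin> getBaseClass();
    }
    static final class TestPlugin implements Plugin {
        @Override public String getId() { return "testplugin"; }
        // The newly required override, mirroring TestProvider.getBaseClass() in the diff above.
        @Override public Class<? extends Plugin> getBaseClass() { return TestPlugin.class; }
    }
    public static void main(String[] args) {
        Plugin p = new TestPlugin();
        System.out.println(p.getId() + " -> " + p.getBaseClass().getSimpleName());
    }
}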
|
Java
|
mit
|
5027d46650018849212b41f4c74eb673cb2f962b
| 0
|
mrtexaznl/mcproxy
|
package org.mediterraneancoin.proxy;
import java.security.GeneralSecurityException;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ObjectNode;
import org.mediterraneancoin.miner.SuperHasher;
import org.mediterraneancoin.proxy.McproxyHandler.SessionStorage;
import org.mediterraneancoin.proxy.StratumConnection.ServerWork;
import static org.mediterraneancoin.proxy.net.RPCUtils.tohex;
import org.mediterraneancoin.proxy.net.WorkState;
/**
*
* @author dev4
*/
public class StratumThread implements Runnable {
private static long minDeltaTime = 200; // ms
private static int minQueueLength = 4;
private static int maxQueueLength = 8;
//static String workerName;
//private static String workerPassword;
private long lastGetwork;
private long localMinDeltaTime;
final ObjectMapper mapper = new ObjectMapper();
private static final ConcurrentLinkedQueue<McproxyHandler.SessionStorage> queue = new ConcurrentLinkedQueue<McproxyHandler.SessionStorage>();
private static boolean DEBUG = true;
private static final String prefix = "THREAD ";
private static int counter = 0;
private int threadId;
private StratumConnection stratumConnection;
public StratumThread() {
threadId = counter++;
this.localMinDeltaTime = minDeltaTime;
}
public void start() {
stratumConnection = StratumConnection.getInstance();
new Thread(this).start();
}
public void getWorkFromStratum() {
long now = System.currentTimeMillis();
if (now - lastGetwork < localMinDeltaTime) {
if (DEBUG)
System.out.println(prefix + "too near getWorkFromStratum, skipping; delta = " + (now - lastGetwork) + ", localMinDeltaTime=" + localMinDeltaTime);
try {
Thread.sleep(localMinDeltaTime - (now - lastGetwork));
} catch (InterruptedException ex) {
}
return;
}
System.out.println(prefix + "stratum work request... thread " + threadId);
SessionStorage storage = new SessionStorage();
try {
storage.serverWork = stratumConnection.getWork();
} catch (NoSuchAlgorithmException ex) {
Logger.getLogger(StratumThread.class.getName()).log(Level.SEVERE, null, ex);
} catch (CloneNotSupportedException ex) {
Logger.getLogger(StratumThread.class.getName()).log(Level.SEVERE, null, ex);
}
if (storage.serverWork == null || storage.serverWork.block_header == null ) {
System.out.println(prefix + "thread " + threadId + " getting null! Waiting for a while...");
try {
Thread.sleep(1000);
} catch (InterruptedException ex) {
}
return;
}
storage.work = new WorkState(null);
// storage.serverWork.block_header has to be byteswapped before going through stage1!!
// 00000002ff9fc69577e6881d52ee081d9134f77934435ca6a9fe987548809bd5a8bb5750FFCB5BF7E595E88EFF7390CAEBF12673DF24A2AD81C9EEAA518B5F864E4698AB52FCA55D1b01a8bf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000
// parseData does a byteswap of the args!!!
storage.work.parseData( /*WorkState.byteSwap*/( storage.serverWork.block_header ) );
storage.work.setTarget(storage.serverWork.target_hex);
String dataFromWallet = storage.work.getAllDataHex();
if (DEBUG) {
// data has already been byteswapped
System.out.println(prefix + "data: " + dataFromWallet);
//System.out.println(prefix + "storage.serverWork.block_header: " + storage.serverWork.block_header);
//System.out.println(prefix + "byte swap : " + WorkState.byteSwap(storage.serverWork.block_header));
System.out.println(prefix + "target: " + storage.work.getTarget());
}
// let's byteswap the target? NEIN!!!
//storage.work.setTarget( WorkState.byteSwap( storage.work.getTarget() ) );
SuperHasher hasher = null;
try {
hasher = new SuperHasher();
} catch (GeneralSecurityException ex) {
Logger.getLogger(GetworkThread.class.getName()).log(Level.SEVERE, null, ex);
}
byte [] part1 = null;
try {
part1 = hasher.firstPartHash(storage.work.getData1() );
} catch (GeneralSecurityException ex) {
Logger.getLogger(GetworkThread.class.getName()).log(Level.SEVERE, null, ex);
}
if (DEBUG) {
System.out.println(prefix + "part1: " + tohex(part1));
//System.out.println();
}
ObjectNode resultNode = mapper.createObjectNode();
// we need to byteswap data before sending it
String tempData = tohex(part1) + tohex(storage.work.getData2());
String dataStr = WorkState.byteSwap( tempData );
if (DEBUG) {
System.out.println(prefix + "data for miner: " + dataStr);
//System.out.println();
}
resultNode.put("data", dataStr );
resultNode.put("target", storage.work.getTarget());
ObjectNode answerNode = mapper.createObjectNode();
answerNode.put("result", resultNode);
answerNode.put("error", (String)null);
// ...
answerNode.put("id", 1 );
storage.answer = answerNode.toString();
storage.sentData = dataStr;
storage.dataFromWallet = dataFromWallet;
//works.put(dataStr.substring(0, 68*2) , sessionStorage);
if (DEBUG) {
System.out.println(prefix + "json: " + storage.answer);
System.out.println();
System.out.println();
}
lastGetwork = now;
queue.add(storage);
}
public void cleanup() {
while (queue.size() > maxQueueLength) {
if (DEBUG)
System.out.println("queue.size(): " + queue.size());
SessionStorage item = queue.poll();
item.work.setUtils(null);
item.work = null;
item.serverWork.extranonce2 = null;
item.serverWork = null;
}
}
public static SessionStorage getSessionStorage() {
SessionStorage result;
//long dt = 0;
//do {
while ((result = queue.poll()) == null) {
try {
Thread.sleep(minDeltaTime);
} catch (InterruptedException ex) {
}
}
//} while (dt < );
if (DEBUG)
System.out.println("poll from servlet");
return result;
}
@Override
public void run() {
while (true) {
if (DEBUG)
System.out.println("thread " + threadId + ", queue.size()=" + queue.size() + ", minQueueLength=" + minQueueLength +
", localMinDeltaTime=" + localMinDeltaTime);
try {
getWorkFromStratum();
} catch (Exception ex) {
ex.printStackTrace();
}
long now = System.currentTimeMillis();
try {
Thread.sleep( localMinDeltaTime / 2 );
} catch (InterruptedException ex) {
}
cleanup();
//
if (queue.size() <= 1) {
localMinDeltaTime = 10;
} else if (queue.size() < maxQueueLength && queue.size() >= minQueueLength) {
if (DEBUG)
System.out.print(threadId + "***+decreasing localMinDeltaTime from " + localMinDeltaTime + " ");
localMinDeltaTime = (localMinDeltaTime * 85) / 100;
if (localMinDeltaTime < 10) {
localMinDeltaTime = 10;
continue;
}
if (DEBUG)
System.out.println("to " + localMinDeltaTime + " ms");
} else if (queue.size() < minQueueLength) {
if (DEBUG)
System.out.print(threadId + "***decreasing localMinDeltaTime from " + localMinDeltaTime + " ");
localMinDeltaTime = (localMinDeltaTime * 85) / 100;
if (localMinDeltaTime < 10) {
localMinDeltaTime = 10;
continue;
}
if (DEBUG)
System.out.println("to " + localMinDeltaTime + " ms");
} else if (queue.size() >= /*(int)((minQueueLength * 3.) / 2.)*/ maxQueueLength) {
if (DEBUG)
System.out.print(threadId + "+++increasing localMinDeltaTime from " + localMinDeltaTime + " ");
localMinDeltaTime = (localMinDeltaTime * 115) / 100;
if (localMinDeltaTime > 3000)
localMinDeltaTime = 3000;
if (DEBUG)
System.out.println("to " + localMinDeltaTime + " ms");
}
}
}
public static boolean isDEBUG() {
return DEBUG;
}
public static void setDEBUG(boolean _DEBUG) {
DEBUG = _DEBUG;
}
public static long getMinDeltaTime() {
return minDeltaTime;
}
public static void setMinDeltaTime(long minDeltaTime) {
StratumThread.minDeltaTime = minDeltaTime;
}
public long getLastGetwork() {
return lastGetwork;
}
public void setLastGetwork(long lastGetwork) {
this.lastGetwork = lastGetwork;
}
public int getThreadId() {
return threadId;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public static int getMinQueueLength() {
return minQueueLength;
}
public static void setMinQueueLength(int minQueueLength) {
StratumThread.minQueueLength = minQueueLength;
if (maxQueueLength <= StratumThread.minQueueLength)
maxQueueLength = StratumThread.minQueueLength+1;
}
public static int getMaxQueueLength() {
return maxQueueLength;
}
public static void setMaxQueueLength(int maxQueueLength) {
StratumThread.maxQueueLength = maxQueueLength;
}
}
|
mcproxy4/src/org/mediterraneancoin/proxy/StratumThread.java
|
package org.mediterraneancoin.proxy;
import java.security.GeneralSecurityException;
import java.security.NoSuchAlgorithmException;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ObjectNode;
import org.mediterraneancoin.miner.SuperHasher;
import org.mediterraneancoin.proxy.McproxyHandler.SessionStorage;
import org.mediterraneancoin.proxy.StratumConnection.ServerWork;
import static org.mediterraneancoin.proxy.net.RPCUtils.tohex;
import org.mediterraneancoin.proxy.net.WorkState;
/**
*
* @author dev4
*/
public class StratumThread implements Runnable {
private static long minDeltaTime = 200; // ms
private static int minQueueLength = 4;
private static int maxQueueLength = 8;
//static String workerName;
//private static String workerPassword;
private long lastGetwork;
private long localMinDeltaTime;
final ObjectMapper mapper = new ObjectMapper();
private static final ConcurrentLinkedQueue<McproxyHandler.SessionStorage> queue = new ConcurrentLinkedQueue<McproxyHandler.SessionStorage>();
private static boolean DEBUG = true;
private static final String prefix = "THREAD ";
private static int counter = 0;
private int threadId;
private StratumConnection stratumConnection;
public StratumThread() {
threadId = counter++;
this.localMinDeltaTime = minDeltaTime;
}
public void start() {
stratumConnection = StratumConnection.getInstance();
new Thread(this).start();
}
public void getWorkFromStratum() {
long now = System.currentTimeMillis();
if (now - lastGetwork < localMinDeltaTime) {
if (DEBUG)
System.out.println(prefix + "too near getWorkFromStratum, skipping; delta = " + (now - lastGetwork) + ", localMinDeltaTime=" + localMinDeltaTime);
try {
Thread.sleep(localMinDeltaTime - (now - lastGetwork));
} catch (InterruptedException ex) {
}
return;
}
System.out.println(prefix + "stratum work request... thread " + threadId);
SessionStorage storage = new SessionStorage();
try {
storage.serverWork = stratumConnection.getWork();
} catch (NoSuchAlgorithmException ex) {
Logger.getLogger(StratumThread.class.getName()).log(Level.SEVERE, null, ex);
} catch (CloneNotSupportedException ex) {
Logger.getLogger(StratumThread.class.getName()).log(Level.SEVERE, null, ex);
}
if (storage.serverWork == null || storage.serverWork.block_header == null ) {
System.out.println(prefix + "thread " + threadId + " getting null! Waiting for a while...");
try {
Thread.sleep(1000);
} catch (InterruptedException ex) {
}
return;
}
storage.work = new WorkState(null);
// storage.serverWork.block_header has to be byteswapped before going through stage1!!
// 00000002ff9fc69577e6881d52ee081d9134f77934435ca6a9fe987548809bd5a8bb5750FFCB5BF7E595E88EFF7390CAEBF12673DF24A2AD81C9EEAA518B5F864E4698AB52FCA55D1b01a8bf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000
// parseData does a byteswap of the args!!!
storage.work.parseData( /*WorkState.byteSwap*/( storage.serverWork.block_header ) );
storage.work.setTarget(storage.serverWork.target_hex);
String dataFromWallet = storage.work.getAllDataHex();
if (DEBUG) {
// data has already been byteswapped
System.out.println(prefix + "data: " + dataFromWallet);
//System.out.println(prefix + "storage.serverWork.block_header: " + storage.serverWork.block_header);
//System.out.println(prefix + "byte swap : " + WorkState.byteSwap(storage.serverWork.block_header));
System.out.println(prefix + "target: " + storage.work.getTarget());
}
// let's byteswap the target
storage.work.setTarget( WorkState.byteSwap( storage.work.getTarget() ) );
SuperHasher hasher = null;
try {
hasher = new SuperHasher();
} catch (GeneralSecurityException ex) {
Logger.getLogger(GetworkThread.class.getName()).log(Level.SEVERE, null, ex);
}
byte [] part1 = null;
try {
part1 = hasher.firstPartHash(storage.work.getData1() );
} catch (GeneralSecurityException ex) {
Logger.getLogger(GetworkThread.class.getName()).log(Level.SEVERE, null, ex);
}
if (DEBUG) {
System.out.println(prefix + "part1: " + tohex(part1));
//System.out.println();
}
ObjectNode resultNode = mapper.createObjectNode();
// we need to byteswap data before sending it
String tempData = tohex(part1) + tohex(storage.work.getData2());
String dataStr = WorkState.byteSwap( tempData );
if (DEBUG) {
System.out.println(prefix + "data for miner: " + dataStr);
//System.out.println();
}
resultNode.put("data", dataStr );
resultNode.put("target", storage.work.getTarget());
ObjectNode answerNode = mapper.createObjectNode();
answerNode.put("result", resultNode);
answerNode.put("error", (String)null);
// ...
answerNode.put("id", 1 );
storage.answer = answerNode.toString();
storage.sentData = dataStr;
storage.dataFromWallet = dataFromWallet;
//works.put(dataStr.substring(0, 68*2) , sessionStorage);
if (DEBUG) {
System.out.println(prefix + "json: " + storage.answer);
System.out.println();
System.out.println();
}
lastGetwork = now;
queue.add(storage);
}
public void cleanup() {
while (queue.size() > maxQueueLength) {
if (DEBUG)
System.out.println("queue.size(): " + queue.size());
SessionStorage item = queue.poll();
item.work.setUtils(null);
item.work = null;
item.serverWork.extranonce2 = null;
item.serverWork = null;
}
}
public static SessionStorage getSessionStorage() {
SessionStorage result;
//long dt = 0;
//do {
while ((result = queue.poll()) == null) {
try {
Thread.sleep(minDeltaTime);
} catch (InterruptedException ex) {
}
}
//} while (dt < );
if (DEBUG)
System.out.println("poll from servlet");
return result;
}
@Override
public void run() {
while (true) {
if (DEBUG)
System.out.println("thread " + threadId + ", queue.size()=" + queue.size() + ", minQueueLength=" + minQueueLength +
", localMinDeltaTime=" + localMinDeltaTime);
try {
getWorkFromStratum();
} catch (Exception ex) {
ex.printStackTrace();
}
long now = System.currentTimeMillis();
try {
Thread.sleep( localMinDeltaTime / 2 );
} catch (InterruptedException ex) {
}
cleanup();
//
if (queue.size() <= 1) {
localMinDeltaTime = 10;
} else if (queue.size() < maxQueueLength && queue.size() >= minQueueLength) {
if (DEBUG)
System.out.print(threadId + "***+decreasing localMinDeltaTime from " + localMinDeltaTime + " ");
localMinDeltaTime = (localMinDeltaTime * 85) / 100;
if (localMinDeltaTime < 10) {
localMinDeltaTime = 10;
continue;
}
if (DEBUG)
System.out.println("to " + localMinDeltaTime + " ms");
} else if (queue.size() < minQueueLength) {
if (DEBUG)
System.out.print(threadId + "***decreasing localMinDeltaTime from " + localMinDeltaTime + " ");
localMinDeltaTime = (localMinDeltaTime * 85) / 100;
if (localMinDeltaTime < 10) {
localMinDeltaTime = 10;
continue;
}
if (DEBUG)
System.out.println("to " + localMinDeltaTime + " ms");
} else if (queue.size() >= /*(int)((minQueueLength * 3.) / 2.)*/ maxQueueLength) {
if (DEBUG)
System.out.print(threadId + "+++increasing localMinDeltaTime from " + localMinDeltaTime + " ");
localMinDeltaTime = (localMinDeltaTime * 115) / 100;
if (localMinDeltaTime > 3000)
localMinDeltaTime = 3000;
if (DEBUG)
System.out.println("to " + localMinDeltaTime + " ms");
}
}
}
public static boolean isDEBUG() {
return DEBUG;
}
public static void setDEBUG(boolean _DEBUG) {
DEBUG = _DEBUG;
}
public static long getMinDeltaTime() {
return minDeltaTime;
}
public static void setMinDeltaTime(long minDeltaTime) {
StratumThread.minDeltaTime = minDeltaTime;
}
public long getLastGetwork() {
return lastGetwork;
}
public void setLastGetwork(long lastGetwork) {
this.lastGetwork = lastGetwork;
}
public int getThreadId() {
return threadId;
}
public void setThreadId(int threadId) {
this.threadId = threadId;
}
public static int getMinQueueLength() {
return minQueueLength;
}
public static void setMinQueueLength(int minQueueLength) {
StratumThread.minQueueLength = minQueueLength;
if (maxQueueLength <= StratumThread.minQueueLength)
maxQueueLength = StratumThread.minQueueLength+1;
}
public static int getMaxQueueLength() {
return maxQueueLength;
}
public static void setMaxQueueLength(int maxQueueLength) {
StratumThread.maxQueueLength = maxQueueLength;
}
}
|
stratum support
|
mcproxy4/src/org/mediterraneancoin/proxy/StratumThread.java
|
stratum support
|
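The StratumThread above leans on WorkState.byteSwap to reorder hex block-header data between the wallet and getwork-style miners (the "stratum support" change toggles where that swap is applied to the target). The sketch below shows the per-32-bit-word hex byte swap commonly used for this; it is an assumption about byteSwap's behavior, written for illustration, not the project's actual implementation.
// Sketch only: reverse the byte order inside each 4-byte (8 hex character) word.
public class ByteSwapSketch {
    static String byteSwapHex(String hex) {
        StringBuilder out = new StringBuilder(hex.length());
        for (int i = 0; i < hex.length(); i += 8) {
            String word = hex.substring(i, Math.min(i + 8, hex.length()));
            // Walk the word one byte (two hex characters) at a time, back to front.
            for (int j = word.length() - 2; j >= 0; j -= 2) {
                out.append(word, j, j + 2);
            }
        }
        return out.toString();
    }
    public static void main(String[] args) {
        System.out.println(byteSwapHex("00000002ff9fc695")); // -> 0200000095c69fff
    }
}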
|
Java
|
epl-1.0
|
70f22e846360709f40083466e5a2f1bb78b125dc
| 0
|
BraintagsGmbH/netrelay,BraintagsGmbH/netrelay
|
/*
* #%L
* netrelay
* %%
* Copyright (C) 2015 Braintags GmbH
* %%
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
* #L%
*/
package de.braintags.netrelay.controller;
import java.util.Objects;
import java.util.Properties;
import de.braintags.netrelay.routing.RouterDefinition;
import de.braintags.vertx.util.DebugDetection;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.SessionHandler;
import io.vertx.ext.web.sstore.ClusteredSessionStore;
import io.vertx.ext.web.sstore.LocalSessionStore;
/**
* SessionController uses a {@link SessionHandler} internally to implement session handling for all browser sessions
*
* <br>
* <br>
* Config-Parameter:<br/>
* <UL>
* <LI>{@value #SESSION_STORE_PROP}<br/>
* <LI>{@value #EXPIRATION_STORE_PROP}<br/>
* <LI>{@value #SESSION_MAP_NAME_PROP}<br/>
* </UL>
* <br>
* Request-Parameter:<br/>
* <br/>
* Result-Parameter:<br/>
* <br/>
*
* @author Michael Remme
*/
public class SessionController extends AbstractController {
/**
* The name of the property which defines, which {@link io.vertx.ext.web.sstore.SessionStore} shall be used.
* References to {@link SessionStore}. Possible values are {@link SessionStore#LOCAL_SESSION_STORE} and
* {@link SessionStore#CLUSTERED_SESSION_STORE}
*/
public static final String SESSION_STORE_PROP = "sessionStore";
/**
* The name of the property, which defines in a time unit, when a session expires. Possible definitions are:
* 30000 = milliseconds
* 30 m = 30 minutes
*/
public static final String EXPIRATION_STORE_PROP = "expiration";
/**
* The name of the property which defines the name of the Map, where inside sessions are stored
*/
public static final String SESSION_MAP_NAME_PROP = "sessionMapName";
/**
* The default time, when a session expires in milliseconds
*/
public static final String DEFAULT_SESSION_EXPIRATION = "30 m";
private SessionHandler sessionHandler;
/**
*
*/
public SessionController() {
}
@Override
public void initProperties(Properties properties) {
String storeDef = (String) properties.get(SESSION_STORE_PROP);
Objects.requireNonNull(storeDef);
SessionStore store = SessionStore.valueOf(storeDef);
switch (store) {
case LOCAL_SESSION_STORE:
sessionHandler = SessionHandler
.create(LocalSessionStore.create(getVertx(), getSessionMapName(properties), parseExpiration(properties)))
.setCookieHttpOnlyFlag(true).setCookieSecureFlag(!DebugDetection.isTest());
break;
case CLUSTERED_SESSION_STORE:
sessionHandler = SessionHandler.create(ClusteredSessionStore.create(getVertx(), getSessionMapName(properties)));
break;
default:
throw new UnsupportedOperationException(store.toString());
}
}
private long parseExpiration(Properties properties) {
String timeString = (String) properties.getOrDefault(EXPIRATION_STORE_PROP, DEFAULT_SESSION_EXPIRATION);
if (timeString.endsWith("m")) {
return Long.parseLong(timeString.substring(0, timeString.length() - 1).trim()) * 60 * 1000;
} else {
return Long.parseLong(timeString);
}
}
private String getSessionMapName(Properties properties) {
return (String) properties.getOrDefault(SESSION_MAP_NAME_PROP, "sessionMap");
}
/*
* (non-Javadoc)
*
* @see io.vertx.core.Handler#handle(java.lang.Object)
*/
@Override
public void handleController(RoutingContext context) {
sessionHandler.handle(context);
}
/**
* Creates a default definition for the current instance
*
* @return
*/
public static RouterDefinition createDefaultRouterDefinition() {
RouterDefinition def = new RouterDefinition();
def.setName(SessionController.class.getSimpleName());
def.setBlocking(false);
def.setController(SessionController.class);
def.setHandlerProperties(getDefaultProperties());
return def;
}
/**
* Get the default properties for an implementation of StaticController
*
* @return
*/
public static Properties getDefaultProperties() {
Properties json = new Properties();
json.put(SESSION_STORE_PROP, SessionStore.LOCAL_SESSION_STORE.toString());
json.put(EXPIRATION_STORE_PROP, String.valueOf(DEFAULT_SESSION_EXPIRATION));
return json;
}
}
|
src/main/java/de/braintags/netrelay/controller/SessionController.java
|
/*
* #%L
* netrelay
* %%
* Copyright (C) 2015 Braintags GmbH
* %%
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
* #L%
*/
package de.braintags.netrelay.controller;
import java.util.Objects;
import java.util.Properties;
import de.braintags.netrelay.routing.RouterDefinition;
import de.braintags.vertx.services.base.util.EclipseDetection;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.SessionHandler;
import io.vertx.ext.web.sstore.ClusteredSessionStore;
import io.vertx.ext.web.sstore.LocalSessionStore;
/**
* SessionController uses a {@link SessionHandler} internally to implement session handling for all browser sessions
*
* <br>
* <br>
* Config-Parameter:<br/>
* <UL>
* <LI>{@value #SESSION_STORE_PROP}<br/>
* <LI>{@value #EXPIRATION_STORE_PROP}<br/>
* <LI>{@value #SESSION_MAP_NAME_PROP}<br/>
* </UL>
* <br>
* Request-Parameter:<br/>
* <br/>
* Result-Parameter:<br/>
* <br/>
*
* @author Michael Remme
*/
public class SessionController extends AbstractController {
/**
* The name of the property which defines, which {@link io.vertx.ext.web.sstore.SessionStore} shall be used.
* References to {@link SessionStore}. Possible values are {@link SessionStore#LOCAL_SESSION_STORE} and
* {@link SessionStore#CLUSTERED_SESSION_STORE}
*/
public static final String SESSION_STORE_PROP = "sessionStore";
/**
* The name of the property, which defines in a time unit, when a session expires. Possible definitions are:
* 30000 = milliseconds
* 30 m = 30 minutes
*/
public static final String EXPIRATION_STORE_PROP = "expiration";
/**
* The name of the property which defines the name of the Map, where inside sessions are stored
*/
public static final String SESSION_MAP_NAME_PROP = "sessionMapName";
/**
* The default time, when a session expires in milliseconds
*/
public static final String DEFAULT_SESSION_EXPIRATION = "30 m";
private SessionHandler sessionHandler;
/**
*
*/
public SessionController() {
}
@Override
public void initProperties(Properties properties) {
String storeDef = (String) properties.get(SESSION_STORE_PROP);
Objects.requireNonNull(storeDef);
SessionStore store = SessionStore.valueOf(storeDef);
switch (store) {
case LOCAL_SESSION_STORE:
sessionHandler = SessionHandler
.create(LocalSessionStore.create(getVertx(), getSessionMapName(properties), parseExpiration(properties)))
.setCookieHttpOnlyFlag(true).setCookieSecureFlag(!EclipseDetection.isTest());
break;
case CLUSTERED_SESSION_STORE:
sessionHandler = SessionHandler.create(ClusteredSessionStore.create(getVertx(), getSessionMapName(properties)));
break;
default:
throw new UnsupportedOperationException(store.toString());
}
}
private long parseExpiration(Properties properties) {
String timeString = (String) properties.getOrDefault(EXPIRATION_STORE_PROP, DEFAULT_SESSION_EXPIRATION);
if (timeString.endsWith("m")) {
return Long.parseLong(timeString.substring(0, timeString.length() - 1).trim()) * 60 * 1000;
} else {
return Long.parseLong(timeString);
}
}
private String getSessionMapName(Properties properties) {
return (String) properties.getOrDefault(SESSION_MAP_NAME_PROP, "sessionMap");
}
/*
* (non-Javadoc)
*
* @see io.vertx.core.Handler#handle(java.lang.Object)
*/
@Override
public void handleController(RoutingContext context) {
sessionHandler.handle(context);
}
/**
* Creates a default definition for the current instance
*
* @return the default {@link RouterDefinition} for this controller
*/
public static RouterDefinition createDefaultRouterDefinition() {
RouterDefinition def = new RouterDefinition();
def.setName(SessionController.class.getSimpleName());
def.setBlocking(false);
def.setController(SessionController.class);
def.setHandlerProperties(getDefaultProperties());
return def;
}
/**
* Get the default properties for an implementation of SessionController
*
* @return the default properties
*/
public static Properties getDefaultProperties() {
Properties json = new Properties();
json.put(SESSION_STORE_PROP, SessionStore.LOCAL_SESSION_STORE.toString());
json.put(EXPIRATION_STORE_PROP, String.valueOf(DEFAULT_SESSION_EXPIRATION));
return json;
}
}
|
refactoring debug detection
|
src/main/java/de/braintags/netrelay/controller/SessionController.java
|
refactoring debug detection
|
|
Java
|
agpl-3.0
|
f97d49c8ecc417832cf8a7205bc7e78fc7c1d845
| 0
|
atomicjets/exporter,bio4j/exporter
|
package com.bio4j.exporter;
import java.util.Scanner;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
public class Bio4jExporter {
public static void main(String[] args) {
Options options = generateOptions();
BasicParser parser = new BasicParser();
ExporterCore exporter = new ExporterCore();
try {
// parsing the command line arguments
if (!parseCmdLineArgs(args, options, parser, exporter)) {
return; // user asked for help or to quit, exporter stops
}
printBanner();
Scanner scanIn = new Scanner(System.in);
if (exporter.getFormat() == null) {
System.out
.print("Please state the desired output file format (Gexf/Graphml/GraphSON): ");
String format = scanIn.nextLine();
if(checkQuit(exporter, scanIn, format))
return;
exporter.setFormat(format);
}
if (exporter.getSource() == null) {
System.out.print("Please state input source adress: ");
String source = scanIn.nextLine();
if(checkQuit(exporter, scanIn, source))
return;
exporter.setSource(source);
}
if(exporter.getQuery() != null){
exporter.runQuery();
}
readEvalPrintLoop(exporter);
} catch (Exception exp) {
System.out.println("Invalid query, caught unexpected exception: " + exp.getMessage());
readEvalPrintLoop(exporter);
}
}
/**
* Runs the interactive read-eval-print loop, executing each entered query until the user quits.
*
* @param exporter the ExporterCore used to run the entered queries
*/
private static void readEvalPrintLoop(ExporterCore exporter) {
Scanner scanIn;
// REPL
while(true){
scanIn = new Scanner(System.in);
System.out.print("bio4jexporter> ");
String query = scanIn.nextLine();
if(checkQuit(exporter, scanIn, query))
return;
exporter.setQuery(query);
exporter.runQuery();
}
}
/**
* Checks whether the user asked to leave the exporter.
*
* @param exporter the exporter to shut down on quit
* @param scanIn the input scanner to close on quit
* @param query the line entered by the user
* @return true if the input was "quit" or "exit" and the exporter was shut down, false otherwise
*/
private static boolean checkQuit(ExporterCore exporter, Scanner scanIn,
String query) {
if(query.equalsIgnoreCase("quit") || query.equalsIgnoreCase("exit") ){
scanIn.close();
exporter.shutdownGraph();
return true;
} else
return false;
}
/**
* Parses the command line arguments and applies them to the exporter.
*
* @param args the raw command line arguments
* @param options the supported command line options
* @param parser the parser used to interpret the arguments
* @param exporter the exporter to configure
* @return false if the user asked for help or to quit, true otherwise
* @throws ParseException if the arguments cannot be parsed
* @throws Exception if configuring the exporter fails
*/
private static boolean parseCmdLineArgs(String[] args, Options options,
BasicParser parser, ExporterCore exporter) throws ParseException,
Exception {
CommandLine cmd = parser.parse(options, args);
if (cmd.hasOption("quit")) {
return false;
}
if (cmd.hasOption("help")) {
// automatically generate the help statement
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("bio4j-exporter", options);
return false;
}
if (cmd.hasOption("output-format")) {
exporter.setFormat(cmd.getOptionValue("output-format"));
}
if (cmd.hasOption("limit")) {
exporter.setLimit(cmd.getOptionValue("limit"));
}
if (cmd.hasOption("max-time")) {
exporter.setMaxTime(cmd.getOptionValue("max-time"));
}
if (cmd.hasOption("stream")) {
exporter.setStream(cmd.getOptionValue("stream"));
}
if (cmd.hasOption("source")) {
exporter.setSource(cmd.getOptionValue("source"));
}
if (cmd.hasOption("query")) {
exporter.setQuery(cmd.getOptionValue("query"));
}
return true;
}
@SuppressWarnings("static-access")
private static Options generateOptions() {
Options options = new Options();
options.addOption(OptionBuilder.withLongOpt("output-format")
.withDescription("output file format (Gexf/Graphml/GraphSON)")
.hasArg().withArgName("FORMAT").create());
options.addOption(OptionBuilder.withLongOpt("limit")
.withDescription("limit for results").hasArg()
.withArgName("LIMIT").create());
options.addOption(OptionBuilder.withLongOpt("max-time")
.withDescription("maximum elapsed time").hasArg()
.withArgName("TIME").create());
options.addOption("s", "stream", true, "stream results ([yes]/[no])");
options.addOption("h", "help", false, "print help statement");
options.addOption("src", "source", true,
"input source adress");
options.addOption("qr", "query", true,
"query expressed in Gremlin Graph Querying Language");
options.addOption("q", "quit", false,
"exit from the exporter");
return options;
}
private static void printBanner() {
String banner = " __ __ _____ __ __ \n"
+ "| |--.|__|.-----.| | | |__| .-----..--.--..-----..-----..----.| |_ .-----..----.\n"
+ "| _ || || _ ||__ | | | | -__||_ _|| _ || _ || _|| _|| -__|| _|\n"
+ "|_____||__||_____| |__| | | |_____||__.__|| __||_____||__| |____||_____||__| \n"
+ " |___| |__| ";
System.out.println(banner);
}
}
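// Illustrative sketch (hypothetical demo class, not part of the exporter): a minimal, self-contained
// example of how the commons-cli options built in generateOptions() are consumed. It rebuilds two
// of the exporter's flags and parses a sample argv; it assumes commons-cli 1.x, where BasicParser
// and OptionBuilder are still available (both are deprecated in later releases). The sample values
// ("GraphSON", "/tmp/bio4j-db") are invented for this sketch.
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;

class CmdLineSketch {
  @SuppressWarnings("static-access")
  public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withLongOpt("output-format")
        .withDescription("output file format (Gexf/Graphml/GraphSON)")
        .hasArg().withArgName("FORMAT").create());
    options.addOption("src", "source", true, "input source address");

    String[] sample = {"--output-format", "GraphSON", "--source", "/tmp/bio4j-db"};
    CommandLine cmd = new BasicParser().parse(options, sample);

    // Mirrors the checks in parseCmdLineArgs(): only read what was actually supplied.
    if (cmd.hasOption("output-format")) {
      System.out.println("format = " + cmd.getOptionValue("output-format")); // GraphSON
    }
    if (cmd.hasOption("source")) {
      System.out.println("source = " + cmd.getOptionValue("source")); // /tmp/bio4j-db
    }
  }
}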
|
src/main/java/com/bio4j/exporter/Bio4jExporter.java
|
package com.bio4j.exporter;
import java.util.Scanner;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
public class Bio4jExporter {
public static void main(String[] args) {
Options options = generateOptions();
BasicParser parser = new BasicParser();
ExporterCore exporter = new ExporterCore();
try {
// parsing the command line arguments
if (!parseCmdLineArgs(args, options, parser, exporter)) {
return; // user asked for help or to quit, exporter stops
}
printBanner();
Scanner scanIn = new Scanner(System.in);
if (exporter.getFormat() == null) {
System.out
.print("Please state the desired output file format (Gexf/Graphml/GraphSON): ");
String format = scanIn.nextLine();
if(checkQuit(exporter, scanIn, format))
return;
exporter.setFormat(format);
}
if (exporter.getSource() == null) {
System.out.print("Please state input source adress: ");
String source = scanIn.nextLine();
if(checkQuit(exporter, scanIn, source))
return;
exporter.setSource(source);
}
System.out.println("Attempting to run query. . . ");
exporter.runQuery();
readEvalPrintLoop(exporter);
} catch (Exception exp) {
System.out.println("Invalid query, caught unexpected exception: " + exp.getMessage());
readEvalPrintLoop(exporter);
}
}
/**
* @param exporter
*/
private static void readEvalPrintLoop(ExporterCore exporter) {
Scanner scanIn;
// REPL
while(true){
scanIn = new Scanner(System.in);
System.out.print("bio4jexporter> ");
String query = scanIn.nextLine();
if(checkQuit(exporter, scanIn, query))
return;
exporter.setQuery(query);
exporter.runQuery();
}
}
/**
* @param exporter
* @param scanIn
* @param query
* @return
*/
private static boolean checkQuit(ExporterCore exporter, Scanner scanIn,
String query) {
if(query.equalsIgnoreCase("quit")){
scanIn.close();
exporter.shutdownGraph();
return true;
} else
return false;
}
/**
* @param args
* @param options
* @param parser
* @param exporter
* @throws ParseException
* @throws Exception
*/
private static boolean parseCmdLineArgs(String[] args, Options options,
BasicParser parser, ExporterCore exporter) throws ParseException,
Exception {
CommandLine cmd = parser.parse(options, args);
if (cmd.hasOption("quit")) {
return false;
}
if (cmd.hasOption("help")) {
// automatically generate the help statement
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("bio4j-exporter", options);
return false;
}
if (cmd.hasOption("output-format")) {
exporter.setFormat(cmd.getOptionValue("output-format"));
}
if (cmd.hasOption("limit")) {
exporter.setLimit(cmd.getOptionValue("limit"));
}
if (cmd.hasOption("max-time")) {
exporter.setMaxTime(cmd.getOptionValue("max-time"));
}
if (cmd.hasOption("stream")) {
exporter.setStream(cmd.getOptionValue("stream"));
}
if (cmd.hasOption("source")) {
exporter.setSource(cmd.getOptionValue("source"));
}
if (cmd.hasOption("query")) {
exporter.setQuery(cmd.getOptionValue("query"));
}
return true;
}
@SuppressWarnings("static-access")
private static Options generateOptions() {
Options options = new Options();
options.addOption(OptionBuilder.withLongOpt("output-format")
.withDescription("output file format (Gexf/Graphml/GraphSON)")
.hasArg().withArgName("FORMAT").create());
options.addOption(OptionBuilder.withLongOpt("limit")
.withDescription("limit for results").hasArg()
.withArgName("LIMIT").create());
options.addOption(OptionBuilder.withLongOpt("max-time")
.withDescription("maximum elapsed time").hasArg()
.withArgName("TIME").create());
options.addOption("s", "stream", true, "stream results ([yes]/[no])");
options.addOption("h", "help", false, "print help statement");
options.addOption("src", "source", true,
"input source adress");
options.addOption("qr", "query", true,
"query expressed in Gremlin Graph Querying Language");
options.addOption("q", "quit", false,
"exit from the exporter");
return options;
}
private static void printBanner() {
String banner = " __ __ _____ __ __ \n"
+ "| |--.|__|.-----.| | | |__| .-----..--.--..-----..-----..----.| |_ .-----..----.\n"
+ "| _ || || _ ||__ | | | | -__||_ _|| _ || _ || _|| _|| -__|| _|\n"
+ "|_____||__||_____| |__| | | |_____||__.__|| __||_____||__| |____||_____||__| \n"
+ " |___| |__| ";
System.out.println(banner);
}
}
|
Improve REPL
|
src/main/java/com/bio4j/exporter/Bio4jExporter.java
|
Improve REPL
|
|
Java
|
lgpl-2.1
|
8346dc276a358284c571e23f62e57c1c50a46b61
| 0
|
ethaneldridge/vassal,ethaneldridge/vassal,ethaneldridge/vassal
|
/*
*
* Copyright (c) 2008-2020 by Rodney Kinney, Brent Easton
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.configure;
import VASSAL.i18n.Resources;
import VASSAL.tools.NamedKeyManager;
import VASSAL.tools.NamedKeyStroke;
import VASSAL.tools.icon.IconFactory;
import VASSAL.tools.icon.IconFamily;
import VASSAL.tools.swing.SwingUtils;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JLayer;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.plaf.LayerUI;
import javax.swing.text.AbstractDocument;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.DocumentFilter;
import net.miginfocom.swing.MigLayout;
/**
* A configurer for Configuring Key Strokes. It allows the entry of either
* a standard keystroke, or a Named command.
*
* It contains two separate Text fields, one for the Name and one for the keystroke.
* A user can fill in one or the other. Filling in one, clears the other.
*
* This Configurer has a limited undo function. Whenever one of the two fields gains focus,
* the current state of the Configurer is saved and the Undo button enabled.
* The undo button will return to the state when that field gained focus.
* This provides a one-step undo if a user accidentally types in one of the fields and
* wipes out data in the other field.
*/
public class NamedHotKeyConfigurer extends Configurer implements FocusListener {
private static final String STROKE_HINT = Resources.getString("Editor.NamedHotKeyConfigurer.keystroke");
private static final String NAME_HINT = Resources.getString("Editor.NamedHotKeyConfigurer.command");
private HintTextField keyStroke;
private HintTextField keyName;
private JPanel controls;
private String lastValue;
private JButton undoButton;
private HintTextField getKeyStroke() {
if (keyStroke == null) {
keyStroke = new HintTextField(StringConfigurer.DEFAULT_LENGTH, STROKE_HINT);
}
return keyStroke;
}
private HintTextField getKeyName() {
if (keyName == null) {
keyName = new HintTextField(StringConfigurer.DEFAULT_LENGTH, NAME_HINT);
}
return keyName;
}
public static String getFancyString(NamedKeyStroke k) {
String s = getString(k);
if (s.length() > 0) {
s = "[" + s + "]";
}
return s;
}
/**
* Return a String representation of a NamedKeyStroke
* @param k NamedKeyStroke
* @return String representation
*/
public static String getString(NamedKeyStroke k) {
return (k == null || k.isNull()) ? "" : getString(k.getStroke());
}
/**
* Return a string representation of a KeyStroke
* @param k KeyStroke
* @return String representation
*/
public static String getString(KeyStroke k) {
return NamedKeyManager.isNamed(k) ? "" : HotKeyConfigurer.getString(k);
}
public NamedHotKeyConfigurer(String key, String name, NamedKeyStroke val) {
super(key, name, val);
}
public NamedHotKeyConfigurer(String key, String name) {
this(key, name, NamedKeyStroke.NULL_KEYSTROKE);
}
public NamedHotKeyConfigurer(NamedKeyStroke val) {
this(null, null, val);
}
public NamedHotKeyConfigurer() {
this(null);
}
@Override
public String getValueString() {
return encode((NamedKeyStroke) getValue());
}
public NamedKeyStroke getValueNamedKeyStroke() {
return (NamedKeyStroke) value;
}
public boolean isNamed() {
return value != null && ((NamedKeyStroke) value).isNamed();
}
@Override
public void requestFocus() {
if (keyName != null) {
keyName.requestFocus();
}
}
@Override
public void setValue(Object o) {
setFrozen(true); // Prevent changes to the input fields triggering further updates
if (controls != null && !noUpdate) {
final NamedKeyStroke stroke = (NamedKeyStroke) o;
if (stroke != null && stroke.isNamed()) {
keyName.setText(stroke.getName());
keyStroke.setText("");
}
else {
keyName.setText("");
keyStroke.setText(getString(stroke));
}
updateVisibility();
}
setFrozen(false);
super.setValue(o);
}
protected void updateVisibility() {
getKeyName().setFocusOnly(isNonNullValue());
getKeyStroke().setFocusOnly(isNonNullValue());
}
private boolean isNonNullValue() {
return value != null && !((NamedKeyStroke) value).isNull();
}
@Override
public void setValue(String s) {
setValue(s == null ? null : decode(s));
}
private void updateValueFromKeyName() {
if (! isFrozen()) {
final String key = keyName.getText();
if (key.isEmpty()) {
setValue(NamedKeyStroke.NULL_KEYSTROKE);
}
else {
setValue(NamedKeyStroke.of(key));
}
}
}
@Override
public Component getControls() {
if (controls == null) {
controls = new ConfigurerPanel(getName(), "[fill,grow]", "[][fill,grow]"); // NON-NLS
keyStroke = getKeyStroke();
keyStroke.setMaximumSize(new Dimension(keyStroke.getMaximumSize().width, keyStroke.getPreferredSize().height));
keyStroke.setText(keyToString());
keyStroke.addKeyListener(new KeyStrokeAdapter());
((AbstractDocument) keyStroke.getDocument()).setDocumentFilter(new KeyStrokeFilter());
keyStroke.addFocusListener(this);
keyName = getKeyName();
keyName.setMaximumSize(new Dimension(keyName.getMaximumSize().width, keyName.getPreferredSize().height));
keyName.setText(getValueNamedKeyStroke() == null ? null : getValueNamedKeyStroke().getName());
((AbstractDocument) keyName.getDocument()).setDocumentFilter(new KeyNameFilter());
keyName.addFocusListener(this);
final JPanel panel = new JPanel(new MigLayout("ins 0", "[fill,grow]0[]0[fill,grow]0[]")); // NON-NLS
final LayerUI<JTextField> layerUI = new ConfigLayerUI(this);
final JLayer<JTextField> nameLayer = new JLayer<>(keyName, layerUI);
panel.add(nameLayer, "grow"); // NON-NLS
panel.add(new JLabel("-")); // NON-NLS
final JLayer<JTextField> keyLayer = new JLayer<>(keyStroke, layerUI);
panel.add(keyLayer, "grow"); // NON-NLS
undoButton = new JButton(IconFactory.getIcon("edit-undo", IconFamily.XSMALL)); // NON-NLS
final int size = (int) keyName.getPreferredSize().getHeight();
undoButton.setPreferredSize(new Dimension(size, size));
undoButton.setMaximumSize(new Dimension(size, size));
undoButton.addActionListener(e -> undo());
undoButton.setEnabled(false);
undoButton.setToolTipText(Resources.getString("Editor.undo"));
panel.add(undoButton);
controls.add(panel, "grow"); // NON-NLS
updateVisibility();
}
return controls;
}
private void undo() {
if (lastValue != null) {
setValue(lastValue);
lastValue = null;
undoButton.setEnabled(false);
keyName.requestFocus();
}
}
@Override
public void focusGained(FocusEvent e) {
lastValue = getValueString();
undoButton.setEnabled(true);
}
@Override
public void focusLost(FocusEvent e) {
}
public String keyToString() {
return getString((NamedKeyStroke) getValue());
}
protected boolean isPrintableAscii(char c) {
return isPrintableAscii((int) c);
}
protected boolean isPrintableAscii(int i) {
return i >= ' ' && i <= '~';
}
/**
* Decode a String into a NamedKeyStroke
*/
public static NamedKeyStroke decode(String s) {
if (s == null) {
return NamedKeyStroke.NULL_KEYSTROKE;
}
final String[] parts = s.split(",");
if (parts.length < 2) {
return NamedKeyStroke.NULL_KEYSTROKE;
}
try {
final KeyStroke stroke = KeyStroke.getKeyStroke(
Integer.parseInt(parts[0]),
Integer.parseInt(parts[1])
);
String name = null;
if (parts.length > 2) {
name = parts[2];
}
return NamedKeyStroke.of(stroke, name);
}
catch (Exception e) {
return NamedKeyStroke.NULL_KEYSTROKE;
}
}
/**
* Encode a NamedKeyStroke into a String
*/
public static String encode(NamedKeyStroke stroke) {
if (stroke == null) {
return "";
}
final KeyStroke key = stroke.getStroke();
if (key == null) {
return "";
}
String s = key.getKeyCode() + "," + key.getModifiers();
if (stroke.isNamed()) {
s += "," + stroke.getName();
}
return s;
}
@Override
public void setHighlighted(boolean highlighted) {
super.setHighlighted(highlighted);
getKeyStroke().setBackground(highlighted ? LIST_ENTRY_HIGHLIGHT_COLOR : Color.white);
getKeyName().setBackground(highlighted ? LIST_ENTRY_HIGHLIGHT_COLOR : Color.white);
getKeyStroke().repaint();
getKeyName().repaint();
}
@Override
public void addFocusListener(FocusListener listener) {
super.addFocusListener(listener);
getKeyStroke().addFocusListener(listener);
getKeyName().addFocusListener(listener);
}
@Override
public void removeFocusListener(FocusListener listener) {
super.removeFocusListener(listener);
getKeyStroke().removeFocusListener(listener);
getKeyName().removeFocusListener(listener);
}
// Use JLayer to outline the fields in Red as the Unix LaF ignores TextField background colours
private static class ConfigLayerUI extends LayerUI<JTextField> {
private static final long serialVersionUID = 1L;
private final Configurer parent;
public ConfigLayerUI(Configurer parent) {
this.parent = parent;
}
@Override
public void paint(Graphics g, JComponent c) {
super.paint(g, c);
final Component cc = ((JLayer) c).getView();
if (parent.isHighlighted()) {
final Dimension d = cc.getSize();
g.setColor(Color.red);
g.drawRect(0, 0, d.width - 2, d.height - 2);
}
}
}
private class KeyStrokeAdapter extends KeyAdapter {
@Override
public void keyPressed(KeyEvent e) {
// reportKeyEvent("KEY_PRESSED", e); // NON-NLS
switch (e.getKeyCode()) {
case KeyEvent.VK_DELETE:
case KeyEvent.VK_BACK_SPACE:
setValue(NamedKeyStroke.NULL_KEYSTROKE);
break;
case KeyEvent.VK_SHIFT:
case KeyEvent.VK_CONTROL:
case KeyEvent.VK_META:
case KeyEvent.VK_ALT:
case KeyEvent.VK_ALT_GRAPH:
case KeyEvent.VK_UNDEFINED:
break;
default:
setValue(NamedKeyStroke.of(SwingUtils.convertKeyEvent(e)));
}
}
// Repeat the Key handling for each Key of interest on release.
// This has no effect on Windows, but caters for the bizarre
// KeyEvent sequences created on MacOS.
@Override
public void keyReleased(KeyEvent e) {
// reportKeyEvent("KEY_RELEASED", e); // NON-NLS
switch (e.getKeyCode()) {
case KeyEvent.VK_DELETE:
case KeyEvent.VK_BACK_SPACE:
setValue(NamedKeyStroke.NULL_KEYSTROKE);
break;
case KeyEvent.VK_SHIFT:
case KeyEvent.VK_CONTROL:
case KeyEvent.VK_META:
case KeyEvent.VK_ALT:
case KeyEvent.VK_ALT_GRAPH:
case KeyEvent.VK_UNDEFINED:
break;
default:
setValue(NamedKeyStroke.of(SwingUtils.convertKeyEvent(e)));
}
}
}
private class KeyNameFilter extends DocumentFilter {
@Override
public void remove(FilterBypass fb, int offset, int length) throws BadLocationException {
super.remove(fb, offset, length);
updateValueFromKeyName();
keyName.setCaretPosition(offset);
}
@Override
public void insertString(FilterBypass fb, int offset, String string, AttributeSet attr) throws BadLocationException {
super.insertString(fb, offset, string, attr);
updateValueFromKeyName();
keyName.setCaretPosition(offset + string.length());
}
@Override
public void replace(FilterBypass fb, int offset, int length, String text, AttributeSet attrs) throws BadLocationException {
super.replace(fb, offset, length, text, attrs);
updateValueFromKeyName();
keyName.setCaretPosition(offset + text.length());
}
}
private class KeyStrokeFilter extends DocumentFilter {
@Override
public void replace(FilterBypass fb, int offset, int length, String text, AttributeSet attrs) throws BadLocationException {
super.replace(fb, 0, keyStroke.getText().length(), text, attrs);
}
}
}
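// Illustrative sketch (hypothetical demo class, not part of VASSAL): the wire format produced by
// encode() above is "keyCode,modifiers" for a plain keystroke and "keyCode,modifiers,name" for a
// named command; decode() accepts the same strings and falls back to NULL_KEYSTROKE on malformed
// input. The demo relies only on the NamedKeyStroke API already used in this file (NamedKeyStroke.of,
// isNamed, getStroke); whether it prints exactly "83,128,SaveGame" depends on how that class
// normalises modifiers, and the command name "SaveGame" is invented for this sketch.
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import javax.swing.KeyStroke;

import VASSAL.configure.NamedHotKeyConfigurer;
import VASSAL.tools.NamedKeyStroke;

class NamedHotKeyRoundTripDemo {
  public static void main(String[] args) {
    // Ctrl+S as a named command: VK_S is 83, CTRL_DOWN_MASK is 128.
    KeyStroke ctrlS = KeyStroke.getKeyStroke(KeyEvent.VK_S, InputEvent.CTRL_DOWN_MASK);
    String encoded = NamedHotKeyConfigurer.encode(NamedKeyStroke.of(ctrlS, "SaveGame"));
    System.out.println(encoded); // expected form: "83,128,SaveGame"

    // decode() reverses the encoding; a string without the trailing name yields an unnamed stroke.
    NamedKeyStroke named = NamedHotKeyConfigurer.decode(encoded);
    NamedKeyStroke plain = NamedHotKeyConfigurer.decode("83,128");
    System.out.println(named.isNamed()); // true
    System.out.println(plain.isNamed()); // false
  }
}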
|
vassal-app/src/main/java/VASSAL/configure/NamedHotKeyConfigurer.java
|
/*
*
* Copyright (c) 2008-2020 by Rodney Kinney, Brent Easton
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.configure;
import VASSAL.i18n.Resources;
import VASSAL.tools.NamedKeyManager;
import VASSAL.tools.NamedKeyStroke;
import VASSAL.tools.icon.IconFactory;
import VASSAL.tools.icon.IconFamily;
import VASSAL.tools.swing.SwingUtils;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JLayer;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.plaf.LayerUI;
import javax.swing.text.AbstractDocument;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.DocumentFilter;
import net.miginfocom.swing.MigLayout;
/**
* A configurer for Configuring Key Strokes. It allows the entry of either
* a standard keystroke, or a Named command.
*
* It contains two separate Text fields, one for the Name and one for the keystroke.
* A user can fill in one or the other. Filling in one, clears the other.
*
* This Configurer has a limited undo function. Whenever one of the two fields gains focus,
* the current state of the Configurer is saved and the Undo button enabled.
* The undo button will return to the state when that field gained focus.
* This provides a one-step undo if a user accidentally types in one of the fields and
* wipes out data in the other field.
*/
public class NamedHotKeyConfigurer extends Configurer implements FocusListener {
private static final String STROKE_HINT = Resources.getString("Editor.NamedHotKeyConfigurer.keystroke");
private static final String NAME_HINT = Resources.getString("Editor.NamedHotKeyConfigurer.command");
private HintTextField keyStroke;
private HintTextField keyName;
private JPanel controls;
private String lastValue;
private JButton undoButton;
private HintTextField getKeyStroke() {
if (keyStroke == null) {
keyStroke = new HintTextField(StringConfigurer.DEFAULT_LENGTH, STROKE_HINT);
}
return keyStroke;
}
private HintTextField getKeyName() {
if (keyName == null) {
keyName = new HintTextField(StringConfigurer.DEFAULT_LENGTH, NAME_HINT);
}
return keyName;
}
public static String getFancyString(NamedKeyStroke k) {
String s = getString(k);
if (s.length() > 0) {
s = "[" + s + "]";
}
return s;
}
/**
* Return a String representation of a NamedKeyStroke
* @param k NamedKeyStroke
* @return String representation
*/
public static String getString(NamedKeyStroke k) {
return (k == null || k.isNull()) ? "" : getString(k.getStroke());
}
/**
* Return a string representation of a KeyStroke
* @param k KeyStroke
* @return String representation
*/
public static String getString(KeyStroke k) {
return NamedKeyManager.isNamed(k) ? "" : HotKeyConfigurer.getString(k);
}
public NamedHotKeyConfigurer(String key, String name, NamedKeyStroke val) {
super(key, name, val);
}
public NamedHotKeyConfigurer(String key, String name) {
this(key, name, NamedKeyStroke.NULL_KEYSTROKE);
}
public NamedHotKeyConfigurer(NamedKeyStroke val) {
this(null, null, val);
}
public NamedHotKeyConfigurer() {
this(null);
}
@Override
public String getValueString() {
return encode((NamedKeyStroke) getValue());
}
public NamedKeyStroke getValueNamedKeyStroke() {
return (NamedKeyStroke) value;
}
public boolean isNamed() {
return value != null && ((NamedKeyStroke) value).isNamed();
}
@Override
public void requestFocus() {
if (keyName != null) {
keyName.requestFocus();
}
}
@Override
public void setValue(Object o) {
setFrozen(true); // Prevent changes to the input fields triggering further updates
if (controls != null && !noUpdate) {
final NamedKeyStroke stroke = (NamedKeyStroke) o;
if (stroke.isNamed()) {
keyName.setText(stroke.getName());
keyStroke.setText("");
}
else {
keyName.setText("");
keyStroke.setText(getString(stroke));
}
updateVisibility();
}
setFrozen(false);
super.setValue(o);
}
protected void updateVisibility() {
getKeyName().setFocusOnly(isNonNullValue());
getKeyStroke().setFocusOnly(isNonNullValue());
}
private boolean isNonNullValue() {
return value != null && !((NamedKeyStroke) value).isNull();
}
@Override
public void setValue(String s) {
setValue(s == null ? null : decode(s));
}
private void updateValueFromKeyName() {
if (! isFrozen()) {
final String key = keyName.getText();
if (key.isEmpty()) {
setValue(NamedKeyStroke.NULL_KEYSTROKE);
}
else {
setValue(NamedKeyStroke.of(key));
}
}
}
@Override
public Component getControls() {
if (controls == null) {
controls = new ConfigurerPanel(getName(), "[fill,grow]", "[][fill,grow]"); // NON-NLS
keyStroke = getKeyStroke();
keyStroke.setMaximumSize(new Dimension(keyStroke.getMaximumSize().width, keyStroke.getPreferredSize().height));
keyStroke.setText(keyToString());
keyStroke.addKeyListener(new KeyStrokeAdapter());
((AbstractDocument) keyStroke.getDocument()).setDocumentFilter(new KeyStrokeFilter());
keyStroke.addFocusListener(this);
keyName = getKeyName();
keyName.setMaximumSize(new Dimension(keyName.getMaximumSize().width, keyName.getPreferredSize().height));
keyName.setText(getValueNamedKeyStroke() == null ? null : getValueNamedKeyStroke().getName());
((AbstractDocument) keyName.getDocument()).setDocumentFilter(new KeyNameFilter());
keyName.addFocusListener(this);
final JPanel panel = new JPanel(new MigLayout("ins 0", "[fill,grow]0[]0[fill,grow]0[]")); // NON-NLS
final LayerUI<JTextField> layerUI = new ConfigLayerUI(this);
final JLayer<JTextField> nameLayer = new JLayer<>(keyName, layerUI);
panel.add(nameLayer, "grow"); // NON-NLS
panel.add(new JLabel("-")); // NON-NLS
final JLayer<JTextField> keyLayer = new JLayer<>(keyStroke, layerUI);
panel.add(keyLayer, "grow"); // NON-NLS
undoButton = new JButton(IconFactory.getIcon("edit-undo", IconFamily.XSMALL)); // NON-NLS
final int size = (int) keyName.getPreferredSize().getHeight();
undoButton.setPreferredSize(new Dimension(size, size));
undoButton.setMaximumSize(new Dimension(size, size));
undoButton.addActionListener(e -> undo());
undoButton.setEnabled(false);
undoButton.setToolTipText(Resources.getString("Editor.undo"));
panel.add(undoButton);
controls.add(panel, "grow"); // NON-NLS
updateVisibility();
}
return controls;
}
private void undo() {
if (lastValue != null) {
setValue(lastValue);
lastValue = null;
undoButton.setEnabled(false);
keyName.requestFocus();
}
}
@Override
public void focusGained(FocusEvent e) {
lastValue = getValueString();
undoButton.setEnabled(true);
}
@Override
public void focusLost(FocusEvent e) {
}
public String keyToString() {
return getString((NamedKeyStroke) getValue());
}
protected boolean isPrintableAscii(char c) {
return isPrintableAscii((int) c);
}
protected boolean isPrintableAscii(int i) {
return i >= ' ' && i <= '~';
}
/**
* Decode a String into a NamedKeyStroke
*/
public static NamedKeyStroke decode(String s) {
if (s == null) {
return NamedKeyStroke.NULL_KEYSTROKE;
}
final String[] parts = s.split(",");
if (parts.length < 2) {
return NamedKeyStroke.NULL_KEYSTROKE;
}
try {
final KeyStroke stroke = KeyStroke.getKeyStroke(
Integer.parseInt(parts[0]),
Integer.parseInt(parts[1])
);
String name = null;
if (parts.length > 2) {
name = parts[2];
}
return NamedKeyStroke.of(stroke, name);
}
catch (Exception e) {
return NamedKeyStroke.NULL_KEYSTROKE;
}
}
/**
* Encode a NamedKeyStroke into a String
*/
public static String encode(NamedKeyStroke stroke) {
if (stroke == null) {
return "";
}
final KeyStroke key = stroke.getStroke();
if (key == null) {
return "";
}
String s = key.getKeyCode() + "," + key.getModifiers();
if (stroke.isNamed()) {
s += "," + stroke.getName();
}
return s;
}
@Override
public void setHighlighted(boolean highlighted) {
super.setHighlighted(highlighted);
getKeyStroke().setBackground(highlighted ? LIST_ENTRY_HIGHLIGHT_COLOR : Color.white);
getKeyName().setBackground(highlighted ? LIST_ENTRY_HIGHLIGHT_COLOR : Color.white);
getKeyStroke().repaint();
getKeyName().repaint();
}
@Override
public void addFocusListener(FocusListener listener) {
super.addFocusListener(listener);
getKeyStroke().addFocusListener(listener);
getKeyName().addFocusListener(listener);
}
@Override
public void removeFocusListener(FocusListener listener) {
super.removeFocusListener(listener);
getKeyStroke().removeFocusListener(listener);
getKeyName().removeFocusListener(listener);
}
// Use JLayer to outline the fields in Red as the Unix LaF ignores TextField background colours
private static class ConfigLayerUI extends LayerUI<JTextField> {
private static final long serialVersionUID = 1L;
private final Configurer parent;
public ConfigLayerUI(Configurer parent) {
this.parent = parent;
}
@Override
public void paint(Graphics g, JComponent c) {
super.paint(g, c);
final Component cc = ((JLayer) c).getView();
if (parent.isHighlighted()) {
final Dimension d = cc.getSize();
g.setColor(Color.red);
g.drawRect(0, 0, d.width - 2, d.height - 2);
}
}
}
private class KeyStrokeAdapter extends KeyAdapter {
@Override
public void keyPressed(KeyEvent e) {
// reportKeyEvent("KEY_PRESSED", e); // NON-NLS
switch (e.getKeyCode()) {
case KeyEvent.VK_DELETE:
case KeyEvent.VK_BACK_SPACE:
setValue(NamedKeyStroke.NULL_KEYSTROKE);
break;
case KeyEvent.VK_SHIFT:
case KeyEvent.VK_CONTROL:
case KeyEvent.VK_META:
case KeyEvent.VK_ALT:
case KeyEvent.VK_ALT_GRAPH:
case KeyEvent.VK_UNDEFINED:
break;
default:
setValue(NamedKeyStroke.of(SwingUtils.convertKeyEvent(e)));
}
}
// Repeat the Key handling for each Key of interest on release.
// This has no effect on Windows, but caters for the bizarre
// KeyEvent sequences created on MacOS.
@Override
public void keyReleased(KeyEvent e) {
// reportKeyEvent("KEY_RELEASED", e); // NON-NLS
switch (e.getKeyCode()) {
case KeyEvent.VK_DELETE:
case KeyEvent.VK_BACK_SPACE:
setValue(NamedKeyStroke.NULL_KEYSTROKE);
break;
case KeyEvent.VK_SHIFT:
case KeyEvent.VK_CONTROL:
case KeyEvent.VK_META:
case KeyEvent.VK_ALT:
case KeyEvent.VK_ALT_GRAPH:
case KeyEvent.VK_UNDEFINED:
break;
default:
setValue(NamedKeyStroke.of(SwingUtils.convertKeyEvent(e)));
}
}
}
private class KeyNameFilter extends DocumentFilter {
@Override
public void remove(FilterBypass fb, int offset, int length) throws BadLocationException {
super.remove(fb, offset, length);
updateValueFromKeyName();
keyName.setCaretPosition(offset);
}
@Override
public void insertString(FilterBypass fb, int offset, String string, AttributeSet attr) throws BadLocationException {
super.insertString(fb, offset, string, attr);
updateValueFromKeyName();
keyName.setCaretPosition(offset + string.length());
}
@Override
public void replace(FilterBypass fb, int offset, int length, String text, AttributeSet attrs) throws BadLocationException {
super.replace(fb, offset, length, text, attrs);
updateValueFromKeyName();
keyName.setCaretPosition(offset + text.length());
}
}
private class KeyStrokeFilter extends DocumentFilter {
@Override
public void replace(FilterBypass fb, int offset, int length, String text, AttributeSet attrs) throws BadLocationException {
super.replace(fb, 0, keyStroke.getText().length(), text, attrs);
}
}
}
|
Added null check.
|
vassal-app/src/main/java/VASSAL/configure/NamedHotKeyConfigurer.java
|
Added null check.
|
|
Java
|
lgpl-2.1
|
087425eb8cacfed12faaedcd6ecccad8e3873027
| 0
|
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
|
package org.intermine.web;
/*
* Copyright (C) 2002-2005 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.List;
import java.util.Date;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.log4j.Logger;
import org.apache.poi.hssf.usermodel.*;
import org.intermine.web.config.WebConfig;
import org.intermine.web.config.TableExportConfig;
import org.intermine.web.results.PagedTable;
import org.intermine.web.results.Column;
import org.intermine.util.TextFileUtil;
import org.intermine.objectstore.ObjectStoreException;
/**
* Implementation of <strong>Action</strong> that allows the user to export a PagedTable to a file
*
* @author Kim Rutherford
*/
public class ExportAction extends InterMineAction
{
protected static final Logger LOG = Logger.getLogger(ExportAction.class);
/**
* Method called to export a PagedTable object. Uses the type request parameter to choose the
* export method.
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward execute(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
String type = request.getParameter("type");
if (type.equals("excel")) {
return excel(mapping, form, request, response);
} else if (type.equals("csv")) {
return csv(mapping, form, request, response);
} else if (type.equals("tab")) {
return tab(mapping, form, request, response);
}
HttpSession session = request.getSession();
ServletContext servletContext = session.getServletContext();
WebConfig wc = (WebConfig) servletContext.getAttribute(Constants.WEBCONFIG);
TableExportConfig tableExportConfig =
(TableExportConfig) wc.getTableExportConfigs().get(type);
if (tableExportConfig == null) {
return mapping.findForward("error");
} else {
TableExporter tableExporter =
(TableExporter) Class.forName(tableExportConfig.getClassName()).newInstance();
return tableExporter.export(mapping, form, request, response);
}
}
/**
* Export the RESULTS_TABLE to Excel format by writing it to the OutputStream of the Response.
*
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward excel(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
response.setContentType("Application/vnd.ms-excel");
response.setHeader("Pragma", "no-cache");
response.setHeader("Content-Disposition", "attachment; filename=\"results-table.xls\"");
PagedTable pt = SessionMethods.getResultsTable(session, request.getParameter("table"));
int defaultMax = 10000;
int maxExcelSize =
WebUtil.getIntSessionProperty(session, "max.excel.export.size", defaultMax);
if (pt.getSize() > maxExcelSize) {
ActionMessage actionMessage =
new ActionMessage("export.excelExportTooBig", new Integer(maxExcelSize));
recordError(actionMessage, request);
return mapping.getInputForward();
}
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet = wb.createSheet("results");
try {
List columns = pt.getColumns();
List rowList = pt.getAllRows();
for (int rowIndex = 0;
rowIndex < rowList.size() && rowIndex <= pt.getMaxRetrievableIndex();
rowIndex++) {
List row;
try {
row = (List) rowList.get(rowIndex);
} catch (RuntimeException e) {
// re-throw as a more specific exception
if (e.getCause() instanceof ObjectStoreException) {
throw (ObjectStoreException) e.getCause();
} else {
throw e;
}
}
HSSFRow excelRow = sheet.createRow((short) rowIndex);
// a count of the columns seen so far that are invisible - used to get the
// correct columnIndex for the call to createCell()
int invisibleColumns = 0;
for (int columnIndex = 0; columnIndex < row.size(); columnIndex++) {
Column thisColumn = (Column) columns.get(columnIndex);
// the column order from PagedTable.getList() isn't necessarily the order that
// the user has chosen for the columns
int realColumnIndex = thisColumn.getIndex();
if (!thisColumn.isVisible()) {
invisibleColumns++;
continue;
}
Object thisObject = row.get(realColumnIndex);
// see comment on invisibleColumns
short outputColumnIndex = (short) (columnIndex - invisibleColumns);
if (thisObject instanceof Number) {
float objectAsFloat = ((Number) thisObject).floatValue();
excelRow.createCell(outputColumnIndex).setCellValue(objectAsFloat);
} else {
if (thisObject instanceof Date) {
Date objectAsDate = (Date) thisObject;
excelRow.createCell(outputColumnIndex).setCellValue(objectAsDate);
} else {
excelRow.createCell(outputColumnIndex).setCellValue("" + thisObject);
}
}
}
}
wb.write(response.getOutputStream());
} catch (ObjectStoreException e) {
recordError(new ActionMessage("errors.query.objectstoreerror"), request, e, LOG);
}
return null;
}
/**
* Export the RESULTS_TABLE in comma-separated (CSV) format by writing it to the OutputStream of the Response.
*
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward csv(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
response.setContentType("text/comma-separated-values");
response.setHeader("Pragma", "no-cache");
response.setHeader("Content-Disposition", "inline; filename=\"results-table.csv\"");
PagedTable pt = SessionMethods.getResultsTable(session, request.getParameter("table"));
TextFileUtil.writeCSVTable(response.getOutputStream(), pt.getAllRows(),
getOrder(pt), getVisible(pt), pt.getMaxRetrievableIndex() + 1);
return null;
}
/**
* Export the RESULTS_TABLE in tab-separated format by writing it to the OutputStream of the Response.
*
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward tab(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
response.setContentType("text/tab-separated-values");
response.setHeader("Pragma", "no-cache");
response.setHeader("Content-Disposition", "inline; filename=\"results-table.txt\"");
PagedTable pt = SessionMethods.getResultsTable(session, request.getParameter("table"));
TextFileUtil.writeTabDelimitedTable(response.getOutputStream(), pt.getAllRows(),
getOrder(pt), getVisible(pt),
pt.getMaxRetrievableIndex() + 1);
return null;
}
/**
* Return an int array containing the real column indexes to use while writing the given
* PagedTable.
* @param pt the PagedTable
*/
private static int [] getOrder(PagedTable pt) {
List columns = pt.getColumns();
int [] returnValue = new int [columns.size()];
for (int i = 0; i < columns.size(); i++) {
returnValue[i] = ((Column) columns.get(i)).getIndex();
}
return returnValue;
}
/**
* Return an array containing the visibility of each column in the output
* @param pt the PagedTable
*/
private static boolean [] getVisible(PagedTable pt) {
List columns = pt.getColumns();
boolean [] returnValue = new boolean [columns.size()];
for (int i = 0; i < columns.size(); i++) {
returnValue[i] = ((Column) columns.get(i)).isVisible();
}
return returnValue;
}
}
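// Illustrative sketch (hypothetical, not part of InterMine): the excel() loop above skips invisible
// columns and keeps a running invisibleColumns count so that the remaining cells still land in
// consecutive spreadsheet columns. The standalone class below reproduces only that index
// arithmetic; the class name and the sample visibility flags are invented for this sketch.
class ColumnIndexSketch {
  public static void main(String[] args) {
    boolean[] visible = {true, false, true, true}; // column 1 is hidden
    int invisibleColumns = 0;
    for (int columnIndex = 0; columnIndex < visible.length; columnIndex++) {
      if (!visible[columnIndex]) {
        invisibleColumns++;
        continue;
      }
      int outputColumnIndex = columnIndex - invisibleColumns;
      System.out.println("source column " + columnIndex + " -> output column " + outputColumnIndex);
    }
    // Prints 0 -> 0, 2 -> 1, 3 -> 2: hidden columns never leave gaps in the exported sheet.
  }
}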
|
intermine/web/main/src/org/intermine/web/ExportAction.java
|
package org.intermine.web;
/*
* Copyright (C) 2002-2005 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.List;
import java.util.Date;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpSession;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.action.ActionMessage;
import org.apache.log4j.Logger;
import org.apache.poi.hssf.usermodel.*;
import org.intermine.web.config.WebConfig;
import org.intermine.web.config.TableExportConfig;
import org.intermine.web.results.PagedTable;
import org.intermine.web.results.Column;
import org.intermine.util.TextFileUtil;
import org.intermine.objectstore.ObjectStoreException;
/**
* Implementation of <strong>Action</strong> that allows the user to export a PagedTable to a file
*
* @author Kim Rutherford
*/
public class ExportAction extends InterMineAction
{
protected static final Logger LOG = Logger.getLogger(ExportAction.class);
/**
* Method called to export a PagedTable object. Uses the type request parameter to choose the
* export method.
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward execute(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
String type = request.getParameter("type");
if (type.equals("excel")) {
return excel(mapping, form, request, response);
} else if (type.equals("csv")) {
return csv(mapping, form, request, response);
} else if (type.equals("tab")) {
return tab(mapping, form, request, response);
}
HttpSession session = request.getSession();
ServletContext servletContext = session.getServletContext();
WebConfig wc = (WebConfig) servletContext.getAttribute(Constants.WEBCONFIG);
TableExportConfig tableExportConfig =
(TableExportConfig) wc.getTableExportConfigs().get(type);
if (tableExportConfig == null) {
return mapping.findForward("error");
} else {
TableExporter tableExporter =
(TableExporter) Class.forName(tableExportConfig.getClassName()).newInstance();
return tableExporter.export(mapping, form, request, response);
}
}
/**
* Export the RESULTS_TABLE to Excel format by writing it to the OutputStream of the Response.
*
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward excel(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
response.setContentType("Application/vnd.ms-excel");
response.setHeader("Content-Disposition ", "inline; filename=results-table.xsl");
PagedTable pt = SessionMethods.getResultsTable(session, request.getParameter("table"));
int defaultMax = 10000;
int maxExcelSize =
WebUtil.getIntSessionProperty(session, "max.excel.export.size", defaultMax);
if (pt.getSize() > maxExcelSize) {
ActionMessage actionMessage =
new ActionMessage("export.excelExportTooBig", new Integer(maxExcelSize));
recordError(actionMessage, request);
return mapping.getInputForward();
}
HSSFWorkbook wb = new HSSFWorkbook();
HSSFSheet sheet = wb.createSheet("results");
try {
List columns = pt.getColumns();
List rowList = pt.getAllRows();
for (int rowIndex = 0;
rowIndex < rowList.size() && rowIndex <= pt.getMaxRetrievableIndex();
rowIndex++) {
List row;
try {
row = (List) rowList.get(rowIndex);
} catch (RuntimeException e) {
// re-throw as a more specific exception
if (e.getCause() instanceof ObjectStoreException) {
throw (ObjectStoreException) e.getCause();
} else {
throw e;
}
}
HSSFRow excelRow = sheet.createRow((short) rowIndex);
// a count of the columns seen so far that are invisible - used to get the
// correct columnIndex for the call to createCell()
int invisibleColumns = 0;
for (int columnIndex = 0; columnIndex < row.size(); columnIndex++) {
Column thisColumn = (Column) columns.get(columnIndex);
// the column order from PagedTable.getList() isn't necessarily the order that
// the user has chosen for the columns
int realColumnIndex = thisColumn.getIndex();
if (!thisColumn.isVisible()) {
invisibleColumns++;
continue;
}
Object thisObject = row.get(realColumnIndex);
// see comment on invisibleColumns
short outputColumnIndex = (short) (columnIndex - invisibleColumns);
if (thisObject instanceof Number) {
float objectAsFloat = ((Number) thisObject).floatValue();
excelRow.createCell(outputColumnIndex).setCellValue(objectAsFloat);
} else {
if (thisObject instanceof Date) {
Date objectAsDate = (Date) thisObject;
excelRow.createCell(outputColumnIndex).setCellValue(objectAsDate);
} else {
excelRow.createCell(outputColumnIndex).setCellValue("" + thisObject);
}
}
}
}
wb.write(response.getOutputStream());
} catch (ObjectStoreException e) {
recordError(new ActionMessage("errors.query.objectstoreerror"), request, e, LOG);
}
return null;
}
/**
* Export the RESULTS_TABLE in comma-separated (CSV) format by writing it to the OutputStream of the Response.
*
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward csv(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
response.setContentType("text/comma-separated-values");
response.setHeader("Content-Disposition ", "inline; filename=results-table.csv");
PagedTable pt = SessionMethods.getResultsTable(session, request.getParameter("table"));
TextFileUtil.writeCSVTable(response.getOutputStream(), pt.getAllRows(),
getOrder(pt), getVisible(pt), pt.getMaxRetrievableIndex() + 1);
return null;
}
/**
* Export the RESULTS_TABLE in tab-separated format by writing it to the OutputStream of the Response.
*
* @param mapping The ActionMapping used to select this instance
* @param form The optional ActionForm bean for this request (if any)
* @param request The HTTP request we are processing
* @param response The HTTP response we are creating
* @return an ActionForward object defining where control goes next
* @exception Exception if the application business logic throws
* an exception
*/
public ActionForward tab(ActionMapping mapping,
ActionForm form,
HttpServletRequest request,
HttpServletResponse response)
throws Exception {
HttpSession session = request.getSession();
response.setContentType("text/tab-separated-values");
response.setHeader("Content-Disposition ", "inline; filename=results-table.txt");
PagedTable pt = SessionMethods.getResultsTable(session, request.getParameter("table"));
TextFileUtil.writeTabDelimitedTable(response.getOutputStream(), pt.getAllRows(),
getOrder(pt), getVisible(pt),
pt.getMaxRetrievableIndex() + 1);
return null;
}
/**
* Return an int array containing the real column indexes to use while writing the given
* PagedTable.
* @param pt the PagedTable
*/
private static int [] getOrder(PagedTable pt) {
List columns = pt.getColumns();
int [] returnValue = new int [columns.size()];
for (int i = 0; i < columns.size(); i++) {
returnValue[i] = ((Column) columns.get(i)).getIndex();
}
return returnValue;
}
/**
* Return an array containing the visibility of each column in the output
* @param pt the PagedTable
*/
private static boolean [] getVisible(PagedTable pt) {
List columns = pt.getColumns();
boolean [] returnValue = new boolean [columns.size()];
for (int i = 0; i < columns.size(); i++) {
returnValue[i] = ((Column) columns.get(i)).isVisible();
}
return returnValue;
}
}
|
fixes #405 export file name on internet explorer
Former-commit-id: c7af665edaf08db17d2834d700b6000de2e03bad
|
intermine/web/main/src/org/intermine/web/ExportAction.java
|
fixes #405 export file name on internet explorer
|
|
Java
|
lgpl-2.1
|
03f0a472116bd5ce67b16c8b6a191b1963b0c140
| 0
|
threerings/gwt-utils
|
//
// $Id$
//
// OOO GWT Utils - utilities for creating GWT applications
// Copyright (C) 2009-2010 Three Rings Design, Inc., All Rights Reserved
// http://code.google.com/p/ooo-gwt-utils/
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.gwt.util;
import java.util.Date;
import com.google.gwt.core.client.GWT;
import com.google.gwt.i18n.client.DateTimeFormat;
/**
* Time and date utility methods.
*/
public class DateUtil
{
/**
* Creates a label of the form "9:15am". TODO: support 24 hour time for people who go for that
* sort of thing. If date is null the empty string is returned.
*/
public static String formatTime (Date date)
{
return (date == null) ? "" : _tfmt.format(date).toLowerCase();
}
/**
* Formats the supplied date relative to the current time: Today, Yesterday, MMM dd, and
* finally MMM dd, YYYY. If date is null the empty string is returned.
*/
public static String formatDate (Date date)
{
return formatDate(date, true);
}
/**
* Formats the supplied date relative to the current time: Today, Yesterday, MMM dd, and
* finally MMM dd, YYYY. If date is null the empty string is returned.
*
* @param useShorthand if false, "Today" and "Yesterday" will not be used, only the month/day
* and month/day/year formats.
*/
public static String formatDate (Date date, boolean useShorthand)
{
if (date == null) {
return "";
}
Date now = new Date();
if (getYear(date) != getYear(now)) {
return _yfmt.format(date);
} else if (getMonth(date) != getMonth(now)) {
return _mfmt.format(date);
} else if (useShorthand && getDayOfMonth(date) == getDayOfMonth(now)) {
return _msgs.today();
// these will break for one hour on daylight savings time and we'll instead report the date
// in MMM dd format or we'll call two days ago yesterday for that witching hour; we don't
// have excellent date services in the browser, so we're just going to be OK with that
} else if (useShorthand && getDayOfMonth(date) ==
getDayOfMonth(new Date(now.getTime()-24*60*60*1000))) {
return _msgs.yesterday();
} else if (useShorthand && getDayOfMonth(date) ==
getDayOfMonth(new Date(now.getTime()+ONE_DAY))) {
return _msgs.tomorrow();
} else {
return _mfmt.format(date);
}
}
/**
* Creates a label of the form "{@link #formatDate} at {@link #formatTime}". If date is null
* the empty string is returned.
*/
public static String formatDateTime (Date date)
{
return (date == null) ? "" : _msgs.dateTime(formatDate(date), formatTime(date));
}
@SuppressWarnings("deprecation")
public static Date toDate (int[] datevec)
{
return new Date(datevec[0] - 1900, datevec[1], datevec[2]);
}
@SuppressWarnings("deprecation")
public static int[] toDateVec (Date date)
{
return new int[]{date.getYear() + 1900, date.getMonth(), date.getDate()};
}
@SuppressWarnings("deprecation")
public static Date newDate (String dateStr)
{
return new Date(dateStr);
}
@SuppressWarnings("deprecation")
public static int getDayOfMonth (Date date)
{
return date.getDate();
}
@SuppressWarnings("deprecation")
public static int getDayOfWeek (Date date)
{
return date.getDay();
}
@SuppressWarnings("deprecation")
public static int getMonth (Date date)
{
return date.getMonth();
}
/**
* Returns the year component of the supplied date. <em>Note:</em> this is <em>not</em> the
* year minus 1900 which the underlying {@link Date#getYear} method returns. It's the actual
* year. Returning years since 1900 is stupid.
*/
@SuppressWarnings("deprecation")
public static int getYear (Date date)
{
return date.getYear() + 1900;
}
@SuppressWarnings("deprecation")
public static void zeroTime (Date date)
{
date.setHours(0);
date.setMinutes(0);
date.setSeconds(0);
}
protected static final DateTimeFormat _tfmt = DateTimeFormat.getFormat("h:mmaa");
protected static final DateTimeFormat _mfmt = DateTimeFormat.getFormat("MMM dd");
protected static final DateTimeFormat _yfmt = DateTimeFormat.getFormat("MMM dd, yyyy");
protected static final UtilMessages _msgs = GWT.create(UtilMessages.class);
protected static final long ONE_DAY = 24*60*60*1000;
}
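
The decision ladder in formatDate above (different year -> "MMM dd, yyyy", different month -> "MMM dd", then Today / Yesterday / Tomorrow, otherwise "MMM dd") is easiest to check against concrete inputs. What follows is a minimal plain-JRE sketch of the same ordering, assuming an English locale; it deliberately avoids the GWT DateTimeFormat and UtilMessages classes, which only run inside a GWT client, and the RelativeDateSketch class, relativeLabel method and hard-coded "now" are invented for illustration only.

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Locale;

// Plain-JRE sketch of the relative-date ladder used by DateUtil.formatDate (illustrative only).
public class RelativeDateSketch {
    private static final DateTimeFormatter MONTH_DAY =
            DateTimeFormatter.ofPattern("MMM dd", Locale.ENGLISH);
    private static final DateTimeFormatter MONTH_DAY_YEAR =
            DateTimeFormatter.ofPattern("MMM dd, yyyy", Locale.ENGLISH);

    static String relativeLabel(LocalDate date, LocalDate now) {
        if (date.getYear() != now.getYear()) {
            return date.format(MONTH_DAY_YEAR);     // different year
        } else if (date.getMonth() != now.getMonth()) {
            return date.format(MONTH_DAY);          // same year, different month
        } else if (date.getDayOfMonth() == now.getDayOfMonth()) {
            return "Today";
        } else if (date.getDayOfMonth() == now.minusDays(1).getDayOfMonth()) {
            return "Yesterday";
        } else if (date.getDayOfMonth() == now.plusDays(1).getDayOfMonth()) {
            return "Tomorrow";
        } else {
            return date.format(MONTH_DAY);          // same month, further away
        }
    }

    public static void main(String[] args) {
        LocalDate now = LocalDate.of(2010, 6, 15);
        System.out.println(relativeLabel(now, now));                // Today
        System.out.println(relativeLabel(now.minusDays(1), now));   // Yesterday
        System.out.println(relativeLabel(now.minusDays(9), now));   // Jun 06
        System.out.println(relativeLabel(now.minusYears(1), now));  // Jun 15, 2009
    }
}

Like the original, the yesterday/tomorrow checks compare only the day of month, so this sketch shares the month-boundary caveat noted in the comment inside formatDate.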
|
src/main/java/com/threerings/gwt/util/DateUtil.java
|
//
// $Id$
//
// OOO GWT Utils - utilities for creating GWT applications
// Copyright (C) 2009-2010 Three Rings Design, Inc., All Rights Reserved
// http://code.google.com/p/ooo-gwt-utils/
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.threerings.gwt.util;
import java.util.Date;
import com.google.gwt.core.client.GWT;
import com.google.gwt.i18n.client.DateTimeFormat;
/**
* Time and date utility methods.
*/
public class DateUtil
{
/**
* Creates a label of the form "9:15am". TODO: support 24 hour time for people who go for that
* sort of thing. If date is null the empty string is returned.
*/
public static String formatTime (Date date)
{
return (date == null) ? "" : _tfmt.format(date).toLowerCase();
}
/**
* Formats the supplied date relative to the current time: Today, Yesterday, MMM dd, and
* finally MMM dd, YYYY. If date is null the empty string is returned.
*/
public static String formatDate (Date date)
{
return formatDate(date, true);
}
/**
* Formats the supplied date relative to the current time: Today, Yesterday, MMM dd, and
* finally MMM dd, YYYY. If date is null the empty string is returned.
*
* @param useShorthand if false, "Today" and "Yesterday" will not be used, only the month/day
* and month/day/year formats.
*/
public static String formatDate (Date date, boolean useShorthand)
{
if (date == null) {
return "";
}
Date now = new Date();
if (getYear(date) != getYear(now)) {
return _yfmt.format(date);
} else if (getMonth(date) != getMonth(now)) {
return _mfmt.format(date);
} else if (useShorthand && getDayOfMonth(date) == getDayOfMonth(now)) {
return _msgs.today();
// these will break for one hour on daylight savings time and we'll instead report the date
// in MMM dd format or we'll call two days ago yesterday for that witching hour; we don't
// have excellent date services in the browser, so we're just going to be OK with that
} else if (useShorthand && getDayOfMonth(date) ==
getDayOfMonth(new Date(now.getTime()-24*60*60*1000))) {
return _msgs.yesterday();
} else if (useShorthand && getDayOfMonth(date) ==
getDayOfMonth(new Date(now.getTime()+ONE_DAY))) {
return _msgs.tomorrow();
} else {
return _mfmt.format(date);
}
}
/**
* Creates a label of the form "{@link #formatDate} at {@link #formatTime}". If date is null
* the empty string is returned.
*/
public static String formatDateTime (Date date)
{
return (date == null) ? "" : _msgs.dateTime(formatDate(date), formatTime(date));
}
@SuppressWarnings("deprecation")
public static Date toDate (int[] datevec)
{
return new Date(datevec[0] - 1900, datevec[1], datevec[2]);
}
@SuppressWarnings("deprecation")
public static int[] toDateVec (Date date)
{
return new int[]{date.getYear() + 1900, date.getMonth(), date.getDate()};
}
@SuppressWarnings("deprecation")
public static Date newDate (String dateStr)
{
return new Date(dateStr);
}
@SuppressWarnings("deprecation")
public static int getDayOfMonth (Date date)
{
return date.getDate();
}
@SuppressWarnings("deprecation")
public static int getMonth (Date date)
{
return date.getMonth();
}
/**
* Returns the year component of the supplied date. <em>Note:</em> this is <em>not</em> the
* year minus 1900 which the underlying {@link Date#getYear} method returns. It's the actual
* year. Returning years since 1900 is stupid.
*/
@SuppressWarnings("deprecation")
public static int getYear (Date date)
{
return date.getYear() + 1900;
}
@SuppressWarnings("deprecation")
public static void zeroTime (Date date)
{
date.setHours(0);
date.setMinutes(0);
date.setSeconds(0);
}
protected static final DateTimeFormat _tfmt = DateTimeFormat.getFormat("h:mmaa");
protected static final DateTimeFormat _mfmt = DateTimeFormat.getFormat("MMM dd");
protected static final DateTimeFormat _yfmt = DateTimeFormat.getFormat("MMM dd, yyyy");
protected static final UtilMessages _msgs = GWT.create(UtilMessages.class);
protected static final long ONE_DAY = 24*60*60*1000;
}
|
Added getDayOfWeek to deprecation sidesteppers
|
src/main/java/com/threerings/gwt/util/DateUtil.java
|
Added getDayOfWeek to deprecation sidesteppers
|
|
Java
|
lgpl-2.1
|
d0b0215f6030fc7b40fdea825bf93830ee9aaf29
| 0
|
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
|
package org.intermine.web;
/*
* Copyright (C) 2002-2004 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.ServletContext;
import java.util.Properties;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.TreeSet;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionServlet;
import org.apache.struts.action.PlugIn;
import org.apache.struts.config.ModuleConfig;
import org.intermine.util.TypeUtil;
import org.intermine.metadata.Model;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.web.config.WebConfig;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.ObjectStoreSummary;
import org.intermine.objectstore.ObjectStoreFactory;
/**
* Initialiser for the InterMine web application
* Anything that needs global initialisation goes here.
*
* @author Andrew Varley
*/
public class InitialiserPlugin implements PlugIn
{
private static final Logger LOG = Logger.getLogger(InitialiserPlugin.class);
ProfileManager profileManager;
/**
* Init method called at Servlet initialisation
*
* @param servlet ActionServlet that is managing all the modules
* in this web application
* @param config ModuleConfig for the module with which this
* plug-in is associated
*
* @throws ServletException if this <code>PlugIn</code> cannot
* be successfully initialized
*/
public void init(ActionServlet servlet, ModuleConfig config) throws ServletException {
ServletContext servletContext = servlet.getServletContext();
loadClassDescriptions(servletContext);
loadWebProperties(servletContext);
loadTemplateQueries(servletContext);
loadExampleQueries(servletContext);
loadWebConfig(servletContext);
ObjectStore os = null;
try {
os = ObjectStoreFactory.getObjectStore();
os.flushObjectById();
} catch (Exception e) {
throw new ServletException("Unable to instantiate ObjectStore", e);
}
servletContext.setAttribute(Constants.OBJECTSTORE, os);
loadClassCategories(servletContext, os);
processWebConfig(servletContext, os);
summarizeObjectStore(servletContext, os);
createProfileManager(servletContext, os);
}
/**
* Load the displayer configuration
*/
private void loadWebConfig(ServletContext servletContext) throws ServletException {
InputStream is = servletContext.getResourceAsStream("/WEB-INF/webconfig-model.xml");
if (is == null) {
throw new ServletException("Unable to find webconfig-model.xml");
}
try {
servletContext.setAttribute(Constants.WEBCONFIG, WebConfig.parse(is));
} catch (Exception e) {
throw new ServletException("Unable to parse webconfig-model.xml", e);
}
}
/**
* Load the user-friendly class descriptions
*/
private void loadClassDescriptions(ServletContext servletContext) throws ServletException {
InputStream is =
servletContext.getResourceAsStream("/WEB-INF/classDescriptions.properties");
if (is == null) {
return;
}
Properties classDescriptions = new Properties();
try {
classDescriptions.load(is);
} catch (Exception e) {
throw new ServletException("Error loading classDescriptions.properties", e);
}
servletContext.setAttribute("classDescriptions", classDescriptions);
}
/**
     * Load the web properties into the WEB_PROPERTIES servlet context attribute, combining
     * global.web.properties with any model-specific web.properties.
*/
private void loadWebProperties(ServletContext servletContext) throws ServletException {
Properties webProperties = new Properties();
InputStream globalPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/global.web.properties");
try {
webProperties.load(globalPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to find global.web.properties", e);
}
InputStream modelPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/web.properties");
if (modelPropertiesStream == null) {
// there are no model specific properties
} else {
try {
webProperties.load(modelPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to find web.properties", e);
}
}
servletContext.setAttribute(Constants.WEB_PROPERTIES, webProperties);
}
/**
* Summarize the ObjectStore to get class counts
*/
private void summarizeObjectStore(ServletContext servletContext, ObjectStore os)
throws ServletException {
Properties objectStoreSummaryProperties = new Properties();
InputStream objectStoreSummaryPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/objectstoresummary.properties");
if (objectStoreSummaryPropertiesStream == null) {
// there are no model specific properties
} else {
try {
objectStoreSummaryProperties.load(objectStoreSummaryPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to read objectstoresummary.properties", e);
}
}
ObjectStoreSummary oss = new ObjectStoreSummary(objectStoreSummaryProperties);
Model model = os.getModel();
Map classes = new LinkedHashMap();
Map classCounts = new LinkedHashMap();
for (Iterator i = new TreeSet(model.getClassNames()).iterator(); i.hasNext();) {
String className = (String) i.next();
classes.put(className, TypeUtil.unqualifiedName(className));
try {
//classCounts.put(className, new Integer(1));
classCounts.put(className, new Integer(oss.getClassCount(className)));
} catch (Exception e) {
throw new ServletException("Unable to get class count for " + className, e);
}
}
servletContext.setAttribute(Constants.OBJECT_STORE_SUMMARY, oss);
servletContext.setAttribute("classes", classes);
servletContext.setAttribute("classCounts", classCounts);
}
/**
* Read the example queries into the EXAMPLE_QUERIES servlet context attribute.
*/
private void loadExampleQueries(ServletContext servletContext) throws ServletException {
InputStream exampleQueriesStream =
servletContext.getResourceAsStream("/WEB-INF/example-queries.xml");
if (exampleQueriesStream == null) {
return;
}
Reader exampleQueriesReader = new InputStreamReader(exampleQueriesStream);
Map exampleQueries = null;
try {
exampleQueries = new PathQueryBinding().unmarshal(exampleQueriesReader);
} catch (Exception e) {
throw new ServletException("Unable to parse example-queries.xml", e);
}
servletContext.setAttribute(Constants.EXAMPLE_QUERIES, exampleQueries);
}
/**
* Read the template queries into the TEMPLATE_QUERIES servlet context attribute and create
* CATEGORY_TEMPLATES servlet context attribute that maps category name to list of templates.
*/
private void loadTemplateQueries(ServletContext servletContext) throws ServletException {
InputStream templateQueriesStream =
servletContext.getResourceAsStream("/WEB-INF/template-queries.xml");
if (templateQueriesStream == null) {
return;
}
Reader templateQueriesReader = new InputStreamReader(templateQueriesStream);
Map templateQueries = null;
Map categoryTemplates = new HashMap();
try {
templateQueries = new PathQueryBinding().unmarshal(templateQueriesReader);
} catch (Exception e) {
throw new ServletException("Unable to parse template-queries.xml", e);
}
Properties modelProperties = new Properties();
InputStream modelPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/classes/model.properties");
try {
modelProperties.load(modelPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to find model.properties", e);
}
for (Iterator i = templateQueries.keySet().iterator(); i.hasNext();) {
String queryName = (String) i.next();
String msgKey = "templateQuery." + queryName + ".description";
String catKey = "templateQuery." + queryName + ".category";
PathQuery query = (PathQuery) templateQueries.get(queryName);
TemplateQuery template = new TemplateQuery(queryName,
modelProperties.getProperty(msgKey),
modelProperties.getProperty(catKey),
query);
templateQueries.put(queryName, template);
// Now add to list of templates associated with category
List list = (List) categoryTemplates.get(template.getCategory());
if (list == null) {
list = new ArrayList();
categoryTemplates.put(template.getCategory(), list);
}
list.add(template);
}
servletContext.setAttribute(Constants.TEMPLATE_QUERIES, templateQueries);
servletContext.setAttribute(Constants.CATEGORY_TEMPLATES, categoryTemplates);
}
/**
     * Load the CATEGORY_CLASSES and CATEGORIES servlet context attributes. Loads categories and
     * subcategories from the properties file /WEB-INF/classCategories.properties<p>
*
* The properties file should look something like:
* <pre>
* category.0.name = People
* category.0.subcategories = Employee Manager CEO Contractor Secretary
* category.1.name = Entities
* category.1.subcategories = Bank Address Department
* </pre>
*
* If a specified class cannot be found in the model, the class is ignored and not added to
* the category.
*/
private void loadClassCategories(ServletContext servletContext, ObjectStore os)
throws ServletException {
List categories = new ArrayList();
Map subcategories = new HashMap();
InputStream in = servletContext.getResourceAsStream("/WEB-INF/classCategories.properties");
if (in == null) {
return;
}
Properties properties = new Properties();
try {
properties.load(in);
} catch (IOException err) {
throw new ServletException(err);
}
int n = 0;
String catname;
while ((catname = properties.getProperty("category." + n + ".name")) != null) {
String sc = properties.getProperty("category." + n + ".subcategories");
String subcats[] = StringUtils.split(sc, ' ');
List subcatlist = new ArrayList();
subcats = StringUtils.stripAll(subcats);
for (int i = 0; i < subcats.length; i++) {
String className = os.getModel().getPackageName() + "." + subcats[i];
if (os.getModel().hasClassDescriptor(className)) {
subcatlist.add(subcats[i]);
} else {
LOG.warn("Category \"" + catname + "\" contains unknown class \"" + subcats[i]
+ "\"");
}
}
categories.add(catname);
subcategories.put(catname, subcatlist);
n++;
}
servletContext.setAttribute(Constants.CATEGORIES, categories);
servletContext.setAttribute(Constants.CATEGORY_CLASSES, subcategories);
}
/**
* Create the DISPLAYERS ServletContext attribute by looking at the model and the WebConfig.
*/
private void processWebConfig(ServletContext servletContext, ObjectStore os)
throws ServletException {
try {
Model model = os.getModel();
WebConfig wc = (WebConfig) servletContext.getAttribute(Constants.WEBCONFIG);
Map displayersMap = new HashMap();
for (Iterator modelIter = new TreeSet(model.getClassNames()).iterator();
modelIter.hasNext();) {
String className = (String) modelIter.next();
Set cds = model.getClassDescriptorsForClass(Class.forName(className));
List cdList = new ArrayList(cds);
Map wcTypeMap = (Map) wc.getTypes();
Collections.reverse(cdList);
for (Iterator cdIter = cdList.iterator(); cdIter.hasNext(); ) {
ClassDescriptor cd = (ClassDescriptor) cdIter.next();
if (wcTypeMap.get(cd.getName()) != null) {
displayersMap.put(className, wcTypeMap.get(cd.getName()));
}
for (Iterator fdIter = cd.getFieldDescriptors().iterator(); fdIter.hasNext();) {
FieldDescriptor fd = (FieldDescriptor) fdIter.next();
String newKey = cd.getName() + " " + fd.getName();
if (wcTypeMap.get(newKey) != null) {
displayersMap.put(className + " " + fd.getName(),
wcTypeMap.get(newKey));
}
}
}
}
servletContext.setAttribute(Constants.DISPLAYERS, displayersMap);
} catch (ClassNotFoundException e) {
throw new ServletException("Unable to process webconfig", e);
}
}
private void createProfileManager(ServletContext servletContext, ObjectStore os)
throws ServletException {
try {
profileManager = new ProfileManager(os);
} catch (ObjectStoreException e) {
//throw new ServletException("Unable to create profile manager", e);
}
servletContext.setAttribute(Constants.PROFILE_MANAGER, profileManager);
}
/**
* Destroy method called at Servlet destroy
*/
public void destroy() {
profileManager.close();
}
}
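
The classCategories.properties convention documented on loadClassCategories above (a category.N.name key plus a whitespace-separated category.N.subcategories list) can be exercised on its own. This is a minimal, self-contained sketch of that key-walking loop using only java.util.Properties; the ClassCategorySketch class and the sample keys are invented for illustration, and it skips the model lookup that the real plugin performs.

import java.io.StringReader;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

// Illustrative walk over the category.N.* keys described in the loadClassCategories Javadoc.
public class ClassCategorySketch {
    public static void main(String[] args) throws Exception {
        String sample =
              "category.0.name = People\n"
            + "category.0.subcategories = Employee Manager CEO\n"
            + "category.1.name = Entities\n"
            + "category.1.subcategories = Bank Address Department\n";
        Properties props = new Properties();
        props.load(new StringReader(sample));

        Map<String, List<String>> categories = new LinkedHashMap<>();
        int n = 0;
        String name;
        while ((name = props.getProperty("category." + n + ".name")) != null) {
            List<String> subs = new ArrayList<>();
            String raw = props.getProperty("category." + n + ".subcategories", "");
            for (String sub : raw.trim().split("\\s+")) {
                if (!sub.isEmpty()) {
                    subs.add(sub);  // the real plugin also checks the class exists in the model
                }
            }
            categories.put(name, subs);
            n++;
        }
        System.out.println(categories);
        // {People=[Employee, Manager, CEO], Entities=[Bank, Address, Department]}
    }
}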
|
intermine/src/java/org/intermine/web/InitialiserPlugin.java
|
package org.intermine.web;
/*
* Copyright (C) 2002-2004 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.ServletContext;
import java.util.Properties;
import java.util.Map;
import java.util.HashMap;
import java.util.Set;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.TreeSet;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.struts.action.ActionServlet;
import org.apache.struts.action.PlugIn;
import org.apache.struts.config.ModuleConfig;
import org.intermine.util.TypeUtil;
import org.intermine.metadata.Model;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.web.config.WebConfig;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.ObjectStoreSummary;
import org.intermine.objectstore.ObjectStoreFactory;
/**
* Initialiser for the InterMine web application
* Anything that needs global initialisation goes here.
*
* @author Andrew Varley
*/
public class InitialiserPlugin implements PlugIn
{
private static final Logger LOG = Logger.getLogger(InitialiserPlugin.class);
ProfileManager profileManager;
/**
* Init method called at Servlet initialisation
*
* @param servlet ActionServlet that is managing all the modules
* in this web application
* @param config ModuleConfig for the module with which this
* plug-in is associated
*
* @throws ServletException if this <code>PlugIn</code> cannot
* be successfully initialized
*/
public void init(ActionServlet servlet, ModuleConfig config) throws ServletException {
ServletContext servletContext = servlet.getServletContext();
loadClassDescriptions(servletContext);
loadWebProperties(servletContext);
loadTemplateQueries(servletContext);
loadExampleQueries(servletContext);
loadWebConfig(servletContext);
ObjectStore os = null;
try {
os = ObjectStoreFactory.getObjectStore();
os.flushObjectById();
} catch (Exception e) {
throw new ServletException("Unable to instantiate ObjectStore", e);
}
servletContext.setAttribute(Constants.OBJECTSTORE, os);
loadClassCategories(servletContext, os);
processWebConfig(servletContext, os);
summarizeObjectStore(servletContext, os);
createProfileManager(servletContext, os);
}
/**
* Load the displayer configuration
*/
private void loadWebConfig(ServletContext servletContext) throws ServletException {
InputStream is = servletContext.getResourceAsStream("/WEB-INF/webconfig-model.xml");
if (is == null) {
throw new ServletException("Unable to find webconfig-model.xml");
}
try {
servletContext.setAttribute(Constants.WEBCONFIG, WebConfig.parse(is));
} catch (Exception e) {
throw new ServletException("Unable to parse webconfig-model.xml", e);
}
}
/**
* Load the user-friendly class descriptions
*/
private void loadClassDescriptions(ServletContext servletContext) throws ServletException {
InputStream is =
servletContext.getResourceAsStream("/WEB-INF/classDescriptions.properties");
if (is == null) {
return;
}
Properties classDescriptions = new Properties();
try {
classDescriptions.load(is);
} catch (Exception e) {
throw new ServletException("Error loading classDescriptions.properties", e);
}
servletContext.setAttribute("classDescriptions", classDescriptions);
}
/**
     * Load the web properties into the WEB_PROPERTIES servlet context attribute, combining
     * global.web.properties with any model-specific web.properties.
*/
private void loadWebProperties(ServletContext servletContext) throws ServletException {
Properties webProperties = new Properties();
InputStream globalPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/global.web.properties");
try {
webProperties.load(globalPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to find global.web.properties", e);
}
InputStream modelPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/web.properties");
if (modelPropertiesStream == null) {
// there are no model specific properties
} else {
try {
webProperties.load(modelPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to find web.properties", e);
}
}
servletContext.setAttribute(Constants.WEB_PROPERTIES, webProperties);
}
/**
* Summarize the ObjectStore to get class counts
*/
private void summarizeObjectStore(ServletContext servletContext, ObjectStore os)
throws ServletException {
Properties objectStoreSummaryProperties = new Properties();
InputStream objectStoreSummaryPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/objectstoresummary.properties");
if (objectStoreSummaryPropertiesStream == null) {
// there are no model specific properties
} else {
try {
objectStoreSummaryProperties.load(objectStoreSummaryPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to read objectstoresummary.properties", e);
}
}
ObjectStoreSummary oss = new ObjectStoreSummary(objectStoreSummaryProperties);
Model model = os.getModel();
Map classes = new LinkedHashMap();
Map classCounts = new LinkedHashMap();
for (Iterator i = new TreeSet(model.getClassNames()).iterator(); i.hasNext();) {
String className = (String) i.next();
classes.put(className, TypeUtil.unqualifiedName(className));
try {
//classCounts.put(className, new Integer(1));
classCounts.put(className, new Integer(oss.getClassCount(className)));
} catch (Exception e) {
throw new ServletException("Unable to get class count for " + className, e);
}
}
servletContext.setAttribute(Constants.OBJECT_STORE_SUMMARY, oss);
servletContext.setAttribute("classes", classes);
servletContext.setAttribute("classCounts", classCounts);
}
/**
* Read the example queries into the EXAMPLE_QUERIES servlet context attribute.
*/
private void loadExampleQueries(ServletContext servletContext) throws ServletException {
InputStream exampleQueriesStream =
servletContext.getResourceAsStream("/WEB-INF/example-queries.xml");
if (exampleQueriesStream == null) {
return;
}
Reader exampleQueriesReader = new InputStreamReader(exampleQueriesStream);
Map exampleQueries = null;
try {
exampleQueries = new PathQueryBinding().unmarshal(exampleQueriesReader);
} catch (Exception e) {
throw new ServletException("Unable to parse example-queries.xml", e);
}
servletContext.setAttribute(Constants.EXAMPLE_QUERIES, exampleQueries);
}
/**
* Read the template queries into the TEMPLATE_QUERIES servlet context attribute and create
* CATEGORY_TEMPLATES servlet context attribute that maps category name to list of templates.
*/
private void loadTemplateQueries(ServletContext servletContext) throws ServletException {
InputStream templateQueriesStream =
servletContext.getResourceAsStream("/WEB-INF/template-queries.xml");
if (templateQueriesStream == null) {
return;
}
Reader templateQueriesReader = new InputStreamReader(templateQueriesStream);
Map templateQueries = null;
Map categoryTemplates = new HashMap();
try {
templateQueries = new PathQueryBinding().unmarshal(templateQueriesReader);
} catch (Exception e) {
throw new ServletException("Unable to parse template-queries.xml", e);
}
Properties modelProperties = new Properties();
InputStream modelPropertiesStream =
servletContext.getResourceAsStream("/WEB-INF/classes/model.properties");
try {
modelProperties.load(modelPropertiesStream);
} catch (Exception e) {
throw new ServletException("Unable to find model.properties", e);
}
for (Iterator i = templateQueries.keySet().iterator(); i.hasNext();) {
String queryName = (String) i.next();
String msgKey = "templateQuery." + queryName + ".description";
String catKey = "templateQuery." + queryName + ".category";
PathQuery query = (PathQuery) templateQueries.get(queryName);
TemplateQuery template = new TemplateQuery(queryName,
modelProperties.getProperty(msgKey),
modelProperties.getProperty(catKey),
query);
templateQueries.put(queryName, template);
// Now add to list of templates associated with category
List list = (List) categoryTemplates.get(template.getCategory());
if (list == null) {
list = new ArrayList();
categoryTemplates.put(template.getCategory(), list);
}
list.add(template);
}
servletContext.setAttribute(Constants.TEMPLATE_QUERIES, templateQueries);
servletContext.setAttribute(Constants.CATEGORY_TEMPLATES, categoryTemplates);
}
/**
     * Load the CATEGORY_CLASSES and CATEGORIES servlet context attributes. Loads categories and
     * subcategories from the properties file /WEB-INF/classCategories.properties<p>
*
* The properties file should look something like:
* <pre>
* category.0.name = People
* category.0.subcategories = Employee Manager CEO Contractor Secretary
* category.1.name = Entities
* category.1.subcategories = Bank Address Department
* </pre>
*
* If a specified class cannot be found in the model, the class is ignored and not added to
* the category.
*/
private void loadClassCategories(ServletContext servletContext, ObjectStore os)
throws ServletException {
List categories = new ArrayList();
Map subcategories = new HashMap();
InputStream in = servletContext.getResourceAsStream("/WEB-INF/classCategories.properties");
if (in == null) {
return;
}
Properties properties = new Properties();
try {
properties.load(in);
} catch (IOException err) {
throw new ServletException(err);
}
int n = 0;
String catname;
while ((catname = properties.getProperty("category." + n + ".name")) != null) {
String sc = properties.getProperty("category." + n + ".subcategories");
String subcats[] = StringUtils.split(sc, ' ');
List subcatlist = new ArrayList();
subcats = StringUtils.stripAll(subcats);
for (int i = 0;i < subcats.length;i++) {
String className = os.getModel().getPackageName() + "." + subcats[i];
if (os.getModel().hasClassDescriptor(className)) {
subcatlist.add(subcats[i]);
} else {
LOG.warn("Category \"" + catname + "\" contains unknown class \"" + subcats[i]
+ "\"");
}
}
categories.add(catname);
subcategories.put(catname, subcatlist);
n++;
}
servletContext.setAttribute(Constants.CATEGORIES, categories);
servletContext.setAttribute(Constants.CATEGORY_CLASSES, subcategories);
}
/**
* Create the DISPLAYERS ServletContext attribute by looking at the model and the WebConfig.
*/
private void processWebConfig(ServletContext servletContext, ObjectStore os)
throws ServletException {
try {
Model model = os.getModel();
WebConfig wc = (WebConfig) servletContext.getAttribute(Constants.WEBCONFIG);
Map displayersMap = new HashMap();
for (Iterator modelIter = new TreeSet(model.getClassNames()).iterator();
modelIter.hasNext();) {
String className = (String) modelIter.next();
Set cds = model.getClassDescriptorsForClass(Class.forName(className));
List cdList = new ArrayList(cds);
Map wcTypeMap = (Map) wc.getTypes();
Collections.reverse(cdList);
for (Iterator cdIter = cdList.iterator(); cdIter.hasNext(); ) {
ClassDescriptor cd = (ClassDescriptor) cdIter.next();
if (wcTypeMap.get(cd.getName()) != null) {
displayersMap.put(className, wcTypeMap.get(cd.getName()));
}
for (Iterator fdIter = cd.getFieldDescriptors().iterator(); fdIter.hasNext();) {
FieldDescriptor fd = (FieldDescriptor) fdIter.next();
String newKey = cd.getName() + " " + fd.getName();
if (wcTypeMap.get(newKey) != null) {
displayersMap.put(className + " " + fd.getName(),
wcTypeMap.get(newKey));
}
}
}
}
servletContext.setAttribute(Constants.DISPLAYERS, displayersMap);
} catch (ClassNotFoundException e) {
throw new ServletException("Unable to process webconfig", e);
}
}
private void createProfileManager(ServletContext servletContext, ObjectStore os)
throws ServletException {
try {
profileManager = new ProfileManager(os);
} catch (ObjectStoreException e) {
//throw new ServletException("Unable to create profile manager", e);
}
servletContext.setAttribute(Constants.PROFILE_MANAGER, profileManager);
}
/**
* Destroy method called at Servlet destroy
*/
public void destroy() {
profileManager.close();
}
}
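
The loadWebProperties method above is a small "defaults plus overrides" pattern: global.web.properties is loaded first, and an optional model-specific web.properties is then loaded into the same Properties object, so later keys quietly win. A minimal sketch of that layering; PropertiesOverlaySketch and the sample keys are made up for illustration.

import java.io.StringReader;
import java.util.Properties;

// Loading twice into one Properties object lets the second file override the first on duplicate keys.
public class PropertiesOverlaySketch {
    public static void main(String[] args) throws Exception {
        String global = "project.title = Default title\nfeedback.email = help@example.org\n";
        String model = "project.title = FlyMine\n";  // model-specific override

        Properties web = new Properties();
        web.load(new StringReader(global));  // defaults from global.web.properties
        web.load(new StringReader(model));   // overrides from web.properties

        System.out.println(web.getProperty("project.title"));   // FlyMine
        System.out.println(web.getProperty("feedback.email"));  // help@example.org
    }
}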
|
Style fixes.
Former-commit-id: 58a2cc67dd3969b3fd657d2d5d5522b0b5b59fa8
|
intermine/src/java/org/intermine/web/InitialiserPlugin.java
|
Style fixes.
|
|
Java
|
apache-2.0
|
8163553e21470987719f4eef29f4a44714a4890c
| 0
|
P7h/ProjectEuler
|
/**
* @see <a href="https://projecteuler.net/problem=3">Largest prime factor</a>
* The prime factors of 13195 are 5, 7, 13 and 29.
* What is the largest prime factor of the number 600851475143?
*/
public final class P0003 {
public static void main(final String[] args) {
long number = 600851475143L;
long primeFactor = 1L;
for (long i = 3L; i < number; i += 2) {
if (number % i == 0) {
primeFactor = i;
number /= i;
}
}
if(number > primeFactor) {
primeFactor = number;
}
System.out.println(primeFactor);
}
}
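
The loop above happens to give the right answer for 600851475143 because its prime factors (71, 839, 1471 and 6857) are all odd and distinct, but it divides each candidate out only once and never tests 2, so it can return a composite for other inputs (27 yields 9, and 8 yields 8). A more robust trial-division sketch follows; the LargestPrimeFactor class and method names are chosen here purely for illustration.

// Trial division that strips each prime factor completely before moving on.
public final class LargestPrimeFactor {
    static long largestPrimeFactor(long n) {
        long largest = 1L;
        while (n % 2 == 0) {          // pull out every factor of 2 first
            largest = 2L;
            n /= 2;
        }
        for (long i = 3; i * i <= n; i += 2) {
            while (n % i == 0) {      // divide i out until it no longer divides n
                largest = i;
                n /= i;
            }
        }
        return (n > 1) ? n : largest; // whatever remains above 1 is itself prime
    }

    public static void main(String[] args) {
        System.out.println(largestPrimeFactor(13195L));         // 29
        System.out.println(largestPrimeFactor(600851475143L));  // 6857
    }
}

Dividing each factor out completely keeps the remaining cofactor free of smaller primes, so the loop can stop at the square root of whatever is left.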
|
0003/P0003.java
|
/**
* @see <a href="https://projecteuler.net/problem=3">Largest prime factor</a>
* The prime factors of 13195 are 5, 7, 13 and 29.
* What is the largest prime factor of the number 600851475143?
*/
public final class P0003 {
public static void main(final String[] args) {
//TBD
}
}
|
Java version of Qn#3
|
0003/P0003.java
|
Java version of Qn#3
|
|
Java
|
apache-2.0
|
39c1af02425a9adaeeaff18963d0a35f2b533e83
| 0
|
hackugyo/ThoughtsCalendar_Android
|
package jp.ne.hatena.hackugyo.thoughtscalendar.ui.adapter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import jp.ne.hatena.hackugyo.thoughtscalendar.R;
import jp.ne.hatena.hackugyo.thoughtscalendar.model.AttendStatus;
import jp.ne.hatena.hackugyo.thoughtscalendar.model.AttendingEvent;
import jp.ne.hatena.hackugyo.thoughtscalendar.ui.fragment.PlaceholderFragmentHelper;
import jp.ne.hatena.hackugyo.thoughtscalendar.util.ArrayUtils;
import jp.ne.hatena.hackugyo.thoughtscalendar.util.CalendarUtils;
import jp.ne.hatena.hackugyo.thoughtscalendar.util.StringUtils;
import android.content.Context;
import android.database.Cursor;
import android.database.DataSetObserver;
import android.support.v4.widget.CursorAdapter;
import android.text.format.Time;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.TextView;
import com.android.volley.toolbox.NetworkImageView;
public class PlaceholderListAdapter extends CursorAdapter {
@SuppressWarnings("unused")
private final PlaceholderListAdapter self = this;
private static final int TYPE_NORMAL = 1;
public static final int TYPE_HEADER = 0;
private LayoutInflater mInflater;
/****************************************
* Header
****************************************/
private LinkedHashMap<Integer, String> sectionsIndexer;
private int mHeaderLayoutId;
private int mColumnForGrouping;
private int mViewTypesCount = 1;
public ArrayList<AttendingEvent> mAttendingEvents;
public static class HeaderViewHolder {
public TextView textView;
}
class ViewHolder {
TextView title;
TextView begin;
TextView location;
TextView descrition;
TextView eventId;
public NetworkImageView background;
public View attendStatus;
public View attendStatusExpand;
ImageButton favoriteButton;
View showDetailButton;
}
/****************************************
* Constructor
****************************************/
/**
* @param context
* @param c
* @param autoRequery
*/
public PlaceholderListAdapter(Context context, Cursor c, boolean autoRequery) {
this(context, c, autoRequery, 0, 0, AttendingEvent.AUTHORITY_TOKYO_ART_BEAT);
}
public PlaceholderListAdapter(Context context, Cursor c, boolean autoRequery, //
int headerLayoutId, int columnForGrouping, String authority) {
super(context, c, autoRequery);
mViewTypesCount = (headerLayoutId <= 0 ? 1 : 2);
mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
sectionsIndexer = new LinkedHashMap<Integer, String>();
mHeaderLayoutId = headerLayoutId;
mColumnForGrouping = columnForGrouping;
if (c != null) {
            // First, work out where each section header will fall.
sectionsIndexer = calculateSectionHeaders();
c.registerDataSetObserver(mDataSetObserver);
}
mAttendingEvents = ArrayUtils.asList(AttendingEvent.findEvents(context, authority));
}
/**
     * Watches the underlying data set for changes.
*/
private DataSetObserver mDataSetObserver = new DataSetObserver() {
public void onChanged() {
sectionsIndexer = calculateSectionHeaders();
};
public void onInvalidated() {
sectionsIndexer.clear();
};
};
/**
* {@inheritDoc}
*/
@Override
public void bindView(View view, Context context, Cursor cursor) {
        // Reuse the recycled view and bind the row data to it.
ViewHolder holder = (ViewHolder) view.getTag();
        // Pull the values out of the cursor.
final String title = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_TITLE));
final String begin = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_BEGIN));
final String location = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_EVENTLOCATION));
final String description = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_DESCRIPTION));
final String eventId = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_EVENTID));
        // Push the values into the row's views.
holder.title.setText(title);
holder.location.setText(location);
Time time = new Time();
time.set(Long.parseLong(begin));
holder.begin.setText(CalendarUtils.getTimeString(CalendarUtils.getInstance(time.toMillis(false))) + " 〜 ");
final boolean willAttend = AttendStatus.getAttendStatus(eventId) || includeEventId(eventId, mAttendingEvents);
holder.attendStatus.setBackgroundColor(//
context.getResources().getColor(//
willAttend ? R.color.attended_cell : android.R.color.transparent)//
);
holder.attendStatusExpand.setBackgroundColor(//
context.getResources().getColor(//
willAttend ? R.color.attended_cell : android.R.color.transparent)//
);
holder.favoriteButton.setImageResource(willAttend ? R.drawable.ic_favorite : R.drawable.ic_unfavorite);
//holder.descrition.setText(description);
}
/**
* {@inheritDoc}
*/
@Override
public View newView(Context context, Cursor cursor, ViewGroup viewGroup) {
        // Inflate a fresh row view.
final View view = mInflater.inflate(R.layout.list_row_placeholder, null);
ViewHolder holder = new ViewHolder();
holder.title = (TextView) view.findViewById(android.R.id.text1);
holder.location = (TextView) view.findViewById(android.R.id.text2);
holder.begin = (TextView) view.findViewById(R.id.list_row_placeholder_datetime);
holder.background = (NetworkImageView) view.findViewById(R.id.list_row_network_image_view);
holder.background.setDefaultImageResId(R.color.image_mask);
holder.attendStatus = view.findViewById(R.id.list_row_placeholder_flag);
holder.attendStatusExpand = view.findViewById(R.id.list_row_placeholder_flag_expandable);
holder.favoriteButton = (ImageButton) view.findViewById(R.id.list_row_placeholder_expandable_button_b);
holder.showDetailButton = view.findViewById(R.id.list_row_placeholder_detail);
view.setTag(holder);
return view;
}
/****************************************
* Cursor
****************************************/
@Override
public void changeCursor(Cursor cursor) {
final Cursor old = swapCursor(cursor);
if (old != null) {
old.close();
}
}
/**
* <p>
     * Overridden because the section headers have to be recalculated when the cursor is swapped.
* </p>
*/
@Override
public Cursor swapCursor(Cursor newCursor) {
if (getCursor() != null) {
getCursor().unregisterDataSetObserver(mDataSetObserver);
}
final Cursor oldCursor = super.swapCursor(newCursor);
sectionsIndexer = calculateSectionHeaders();
if (newCursor != null) {
newCursor.registerDataSetObserver(mDataSetObserver);
}
return oldCursor;
}
/****************************************
* Header view
****************************************/
@Override
public View getView(int position, View convertView, ViewGroup parent) {
int viewType = getItemViewType(position);
if (mHeaderLayoutId == 0) {
return super.getView(position, convertView, parent);
}
if (viewType == TYPE_NORMAL) {
Cursor c = (Cursor) getItem(position);
if (c == null) {
return mInflater.inflate(mHeaderLayoutId, parent, false);
}
            // A normal item, so just shift the position past the preceding headers.
final int mapCursorPos = getSectionForPosition(position);
c.moveToPosition(mapCursorPos);
return super.getView(mapCursorPos, convertView, parent);
} else {
HeaderViewHolder holder = null;
if (convertView == null) {
holder = new HeaderViewHolder();
convertView = mInflater.inflate(mHeaderLayoutId, parent, false);
holder.textView = (TextView) convertView.findViewById(android.R.id.text1);
convertView.setTag(holder);
} else {
holder = (HeaderViewHolder) convertView.getTag();
}
TextView sectionText = holder.textView;
final String group = sectionsIndexer.get(position);
final String customFormat = getGroupCustomFormat(group);
sectionText.setText(customFormat == null ? group : customFormat);
return convertView;
}
}
private LinkedHashMap<Integer, String> calculateSectionHeaders() {
int i = 0;
String previous = "";
int count = 0;
final Cursor c = getCursor();
sectionsIndexer.clear();
if (c == null) {
return sectionsIndexer;
}
c.moveToPosition(-1);
while (c.moveToNext()) {
final String group = getCustomGroup(c.getString(mColumnForGrouping));
if (!previous.equals(group)) {
sectionsIndexer.put(i + count, group);
previous = group;
count++;
}
i++;
}
return sectionsIndexer;
}
/**
* <p>
     * This method serves as an interceptor before the sections are calculated, so
     * you can transform machine-oriented data into something human readable, e.g. format a
     * unix timestamp or a status.
     * </p>
     *
     * <p>
     * By default this method returns the original data for the group column.
     * </p>
     * The output of this method drives the grouping that produces the sections.
*
* @param groupData
* @return readable group data
*/
protected String getCustomGroup(String groupData) {
        // Convert the received time string into a date here.
Time time = new Time();
time.set(Long.parseLong(groupData));
final String dateString = CalendarUtils.getYearMonthDateString(CalendarUtils.getInstance(time.toMillis(false)), "/");
        // TODO 20140711: work out how to obtain the event count.
return dateString;
}
public String getGroupCustomFormat(Object obj) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public int getViewTypeCount() {
return mViewTypesCount;
}
@Override
public int getCount() {
return super.getCount() + sectionsIndexer.size();
}
@Override
public boolean isEnabled(int position) {
return getItemViewType(position) == TYPE_NORMAL;
}
@Override
public Object getItem(int position) {
if (getItemViewType(position) == TYPE_NORMAL) {
return super.getItem(getSectionForPosition(position));
}
return super.getItem(position);
}
@Override
public long getItemId(int position) {
if (getItemViewType(position) == TYPE_NORMAL) {
return super.getItemId(getSectionForPosition(position));
}
return super.getItemId(position);
}
@Override
public int getItemViewType(int position) {
if (position == getPositionForSection(position)) {
return TYPE_NORMAL;
}
return TYPE_HEADER;
}
/**
     * Returns the list position at which the given section starts.
*/
public int getPositionForSection(int section) {
if (sectionsIndexer.containsKey(section)) {
return section + 1;
}
return section;
}
/**
     * Returns the section that contains the given position.
*/
public int getSectionForPosition(int position) {
int offset = 0;
for (Integer key : sectionsIndexer.keySet()) {
if (position > key) {
offset++;
} else {
break;
}
}
return position - offset;
}
/**
     * Returns where the given position falls in the Cursor (the actual data).
     *
     * @param position
     * @return the corresponding position within the cursor
*/
public int getCursorPosition(int position) {
return getSectionForPosition(position);
}
public boolean includeEventId(String eventId) {
return includeEventId(eventId, mAttendingEvents);
}
private static boolean includeEventId(String eventId, List<AttendingEvent> attendingEvents) {
for (AttendingEvent e : attendingEvents) {
if (StringUtils.isSame(eventId, e.eventId) && e.attending) return true;
}
return false;
}
}
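
The header bookkeeping above is the subtle part of this adapter: calculateSectionHeaders records the list positions at which the date headers sit, getItemViewType treats exactly those positions as TYPE_HEADER, and getSectionForPosition subtracts the number of headers above a row to recover the cursor position. A small, adapter-free sketch of that offset arithmetic follows; SectionOffsetSketch and its sample dates are made up.

import java.util.LinkedHashMap;
import java.util.Map;

// Stand-alone illustration of the header/position offset arithmetic used by the adapter above.
public class SectionOffsetSketch {
    // Two cursor rows dated 2014/07/11 and one dated 2014/07/12 produce headers at list positions 0 and 3.
    static final Map<Integer, String> SECTIONS = new LinkedHashMap<>();
    static {
        SECTIONS.put(0, "2014/07/11");
        SECTIONS.put(3, "2014/07/12");
    }

    static boolean isHeader(int listPosition) {
        return SECTIONS.containsKey(listPosition);
    }

    // Mirrors getSectionForPosition(): drop one slot for every header above this position.
    static int cursorPosition(int listPosition) {
        int offset = 0;
        for (int headerPosition : SECTIONS.keySet()) {
            if (listPosition > headerPosition) {
                offset++;
            } else {
                break;
            }
        }
        return listPosition - offset;
    }

    public static void main(String[] args) {
        for (int pos = 0; pos < 5; pos++) {
            System.out.println(pos + " -> " + (isHeader(pos)
                    ? "header \"" + SECTIONS.get(pos) + "\""
                    : "cursor row " + cursorPosition(pos)));
        }
        // 0 -> header "2014/07/11"
        // 1 -> cursor row 0
        // 2 -> cursor row 1
        // 3 -> header "2014/07/12"
        // 4 -> cursor row 2
    }
}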
|
src/jp/ne/hatena/hackugyo/thoughtscalendar/ui/adapter/PlaceholderListAdapter.java
|
package jp.ne.hatena.hackugyo.thoughtscalendar.ui.adapter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import jp.ne.hatena.hackugyo.thoughtscalendar.CustomApplication;
import jp.ne.hatena.hackugyo.thoughtscalendar.R;
import jp.ne.hatena.hackugyo.thoughtscalendar.model.AttendStatus;
import jp.ne.hatena.hackugyo.thoughtscalendar.model.AttendingEvent;
import jp.ne.hatena.hackugyo.thoughtscalendar.ui.fragment.PlaceholderFragmentHelper;
import jp.ne.hatena.hackugyo.thoughtscalendar.util.ArrayUtils;
import jp.ne.hatena.hackugyo.thoughtscalendar.util.CalendarUtils;
import jp.ne.hatena.hackugyo.thoughtscalendar.util.StringUtils;
import android.content.Context;
import android.database.Cursor;
import android.database.DataSetObserver;
import android.support.v4.widget.CursorAdapter;
import android.text.format.Time;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.TextView;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.LruImageCache;
import com.android.volley.toolbox.NetworkImageView;
public class PlaceholderListAdapter extends CursorAdapter {
@SuppressWarnings("unused")
private final PlaceholderListAdapter self = this;
private static final int TYPE_NORMAL = 1;
public static final int TYPE_HEADER = 0;
private LayoutInflater mInflater;
/****************************************
* Header
****************************************/
private LinkedHashMap<Integer, String> sectionsIndexer;
private int mHeaderLayoutId;
private int mColumnForGrouping;
private int mViewTypesCount = 1;
public ArrayList<AttendingEvent> mAttendingEvents;
public static class HeaderViewHolder {
public TextView textView;
}
class ViewHolder {
TextView title;
TextView begin;
TextView location;
TextView descrition;
TextView eventId;
public NetworkImageView background;
public View attendStatus;
public View attendStatusExpand;
ImageButton favoriteButton;
View showDetailButton;
}
/****************************************
* Constructor
****************************************/
/**
* @param context
* @param c
* @param autoRequery
*/
public PlaceholderListAdapter(Context context, Cursor c, boolean autoRequery) {
this(context, c, autoRequery, 0, 0, AttendingEvent.AUTHORITY_TOKYO_ART_BEAT);
}
public PlaceholderListAdapter(Context context, Cursor c, boolean autoRequery, //
int headerLayoutId, int columnForGrouping, String authority) {
super(context, c, autoRequery);
mViewTypesCount = (headerLayoutId <= 0 ? 1 : 2);
mInflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
sectionsIndexer = new LinkedHashMap<Integer, String>();
mHeaderLayoutId = headerLayoutId;
mColumnForGrouping = columnForGrouping;
if (c != null) {
            // First, work out where each section header will fall.
sectionsIndexer = calculateSectionHeaders();
c.registerDataSetObserver(mDataSetObserver);
}
mAttendingEvents = ArrayUtils.asList(AttendingEvent.findEvents(context, authority));
}
/**
     * Watches the underlying data set for changes.
*/
private DataSetObserver mDataSetObserver = new DataSetObserver() {
public void onChanged() {
sectionsIndexer = calculateSectionHeaders();
};
public void onInvalidated() {
sectionsIndexer.clear();
};
};
/**
* {@inheritDoc}
*/
@Override
public void bindView(View view, Context context, Cursor cursor) {
        // Reuse the recycled view and bind the row data to it.
ViewHolder holder = (ViewHolder) view.getTag();
        // Pull the values out of the cursor.
final String title = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_TITLE));
final String begin = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_BEGIN));
final String location = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_EVENTLOCATION));
final String description = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_DESCRIPTION));
final String eventId = cursor.getString(cursor.getColumnIndexOrThrow(PlaceholderFragmentHelper.Place.KEY_EVENTID));
        // Push the values into the row's views.
holder.title.setText(title);
holder.location.setText(location);
Time time = new Time();
time.set(Long.parseLong(begin));
holder.begin.setText(CalendarUtils.getTimeString(CalendarUtils.getInstance(time.toMillis(false))) + " 〜 ");
final boolean willAttend = AttendStatus.getAttendStatus(eventId) || includeEventId(eventId, mAttendingEvents);
holder.attendStatus.setBackgroundColor(//
context.getResources().getColor(//
willAttend ? R.color.attended_cell : android.R.color.transparent)//
);
holder.attendStatusExpand.setBackgroundColor(//
context.getResources().getColor(//
willAttend ? R.color.attended_cell : android.R.color.transparent)//
);
holder.favoriteButton.setImageResource(willAttend ? R.drawable.ic_favorite : R.drawable.ic_unfavorite);
        // TODO: fetch the proper image via an image-search API; the result will need to be cached somewhere.
String url = "http://htmlgiant.com/wp-content/uploads/2013/07/hegel.jpg";
switch (cursor.getPosition() % 3) {
case 0:
url = "http://philosophyforchange.files.wordpress.com/2013/06/sartre.jpeg";
break;
case 1:
url = "http://k1naka-essey.c.blog.so-net.ne.jp/_images/blog/_f88/k1naka-essey/E7ABB9E4B8ADE88BB1E5A4AAE9838EE3808CE696B0E99D92E5B9B4E3808DE698ADE5928CEFBC93E5B9B47E69C88E58FB7E794B2E8B380E4B889E9838EE3808CE791A0E79283E78E8BE381AEE791A0E79283E78E89E3808DEFBC92.jpg?c=a1";
break;
default:
break;
}
holder.background.setImageUrl(url, new ImageLoader(CustomApplication.getQueue(), new LruImageCache()));
//holder.descrition.setText(description);
}
/**
* {@inheritDoc}
*/
@Override
public View newView(Context context, Cursor cursor, ViewGroup viewGroup) {
        // Inflate a fresh row view.
final View view = mInflater.inflate(R.layout.list_row_placeholder, null);
ViewHolder holder = new ViewHolder();
holder.title = (TextView) view.findViewById(android.R.id.text1);
holder.location = (TextView) view.findViewById(android.R.id.text2);
holder.begin = (TextView) view.findViewById(R.id.list_row_placeholder_datetime);
holder.background = (NetworkImageView) view.findViewById(R.id.list_row_network_image_view);
holder.attendStatus = view.findViewById(R.id.list_row_placeholder_flag);
holder.attendStatusExpand = view.findViewById(R.id.list_row_placeholder_flag_expandable);
holder.favoriteButton = (ImageButton) view.findViewById(R.id.list_row_placeholder_expandable_button_b);
holder.showDetailButton = view.findViewById(R.id.list_row_placeholder_detail);
view.setTag(holder);
return view;
}
/****************************************
* Cursor
****************************************/
@Override
public void changeCursor(Cursor cursor) {
final Cursor old = swapCursor(cursor);
if (old != null) {
old.close();
}
}
/**
* <p>
     * Overridden because the section headers have to be recalculated when the cursor is swapped.
* </p>
*/
@Override
public Cursor swapCursor(Cursor newCursor) {
if (getCursor() != null) {
getCursor().unregisterDataSetObserver(mDataSetObserver);
}
final Cursor oldCursor = super.swapCursor(newCursor);
sectionsIndexer = calculateSectionHeaders();
if (newCursor != null) {
newCursor.registerDataSetObserver(mDataSetObserver);
}
return oldCursor;
}
/****************************************
* Header view
****************************************/
@Override
public View getView(int position, View convertView, ViewGroup parent) {
int viewType = getItemViewType(position);
if (mHeaderLayoutId == 0) {
return super.getView(position, convertView, parent);
}
if (viewType == TYPE_NORMAL) {
Cursor c = (Cursor) getItem(position);
if (c == null) {
return mInflater.inflate(mHeaderLayoutId, parent, false);
}
            // A normal item, so just shift the position past the preceding headers.
final int mapCursorPos = getSectionForPosition(position);
c.moveToPosition(mapCursorPos);
return super.getView(mapCursorPos, convertView, parent);
} else {
HeaderViewHolder holder = null;
if (convertView == null) {
holder = new HeaderViewHolder();
convertView = mInflater.inflate(mHeaderLayoutId, parent, false);
holder.textView = (TextView) convertView.findViewById(android.R.id.text1);
convertView.setTag(holder);
} else {
holder = (HeaderViewHolder) convertView.getTag();
}
TextView sectionText = holder.textView;
final String group = sectionsIndexer.get(position);
final String customFormat = getGroupCustomFormat(group);
sectionText.setText(customFormat == null ? group : customFormat);
return convertView;
}
}
private LinkedHashMap<Integer, String> calculateSectionHeaders() {
int i = 0;
String previous = "";
int count = 0;
final Cursor c = getCursor();
sectionsIndexer.clear();
if (c == null) {
return sectionsIndexer;
}
c.moveToPosition(-1);
while (c.moveToNext()) {
final String group = getCustomGroup(c.getString(mColumnForGrouping));
if (!previous.equals(group)) {
sectionsIndexer.put(i + count, group);
previous = group;
count++;
}
i++;
}
return sectionsIndexer;
}
/**
* <p>
     * This method serves as an interceptor before the sections are calculated, so
     * you can transform machine-oriented data into something human readable, e.g. format a
     * unix timestamp or a status.
     * </p>
     *
     * <p>
     * By default this method returns the original data for the group column.
     * </p>
     * The output of this method drives the grouping that produces the sections.
*
* @param groupData
* @return readable group data
*/
protected String getCustomGroup(String groupData) {
        // Convert the received time string into a date here.
Time time = new Time();
time.set(Long.parseLong(groupData));
final String dateString = CalendarUtils.getYearMonthDateString(CalendarUtils.getInstance(time.toMillis(false)), "/");
        // TODO 20140711: work out how to obtain the event count.
return dateString;
}
public String getGroupCustomFormat(Object obj) {
return null;
}
/**
* {@inheritDoc}
*/
@Override
public int getViewTypeCount() {
return mViewTypesCount;
}
@Override
public int getCount() {
return super.getCount() + sectionsIndexer.size();
}
@Override
public boolean isEnabled(int position) {
return getItemViewType(position) == TYPE_NORMAL;
}
@Override
public Object getItem(int position) {
if (getItemViewType(position) == TYPE_NORMAL) {
return super.getItem(getSectionForPosition(position));
}
return super.getItem(position);
}
@Override
public long getItemId(int position) {
if (getItemViewType(position) == TYPE_NORMAL) {
return super.getItemId(getSectionForPosition(position));
}
return super.getItemId(position);
}
@Override
public int getItemViewType(int position) {
if (position == getPositionForSection(position)) {
return TYPE_NORMAL;
}
return TYPE_HEADER;
}
/**
     * Returns the list position at which the given section starts.
*/
public int getPositionForSection(int section) {
if (sectionsIndexer.containsKey(section)) {
return section + 1;
}
return section;
}
/**
     * Returns the section that contains the given position.
*/
public int getSectionForPosition(int position) {
int offset = 0;
for (Integer key : sectionsIndexer.keySet()) {
if (position > key) {
offset++;
} else {
break;
}
}
return position - offset;
}
/**
     * Returns where the given position falls in the Cursor (the actual data).
     *
     * @param position
     * @return the corresponding position within the cursor
*/
public int getCursorPosition(int position) {
return getSectionForPosition(position);
}
public boolean includeEventId(String eventId) {
return includeEventId(eventId, mAttendingEvents);
}
private static boolean includeEventId(String eventId, List<AttendingEvent> attendingEvents) {
for (AttendingEvent e : attendingEvents) {
if (StringUtils.isSame(eventId, e.eventId) && e.attending) return true;
}
return false;
}
}
|
Remove the unneeded images for now
|
src/jp/ne/hatena/hackugyo/thoughtscalendar/ui/adapter/PlaceholderListAdapter.java
|
Remove the unneeded images for now
|
|
Java
|
apache-2.0
|
3f81bc566810ee92d979506a6334bf1efd624cf5
| 0
|
mreutegg/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,mreutegg/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,anchela/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,trekawek/jackrabbit-oak,anchela/jackrabbit-oak,mreutegg/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,anchela/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak,apache/jackrabbit-oak,trekawek/jackrabbit-oak,apache/jackrabbit-oak,amit-jain/jackrabbit-oak
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.jcr.AccessDeniedException;
import javax.jcr.Credentials;
import javax.jcr.Item;
import javax.jcr.ItemNotFoundException;
import javax.jcr.NamespaceException;
import javax.jcr.Node;
import javax.jcr.PathNotFoundException;
import javax.jcr.Property;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.Workspace;
import javax.jcr.retention.RetentionManager;
import javax.jcr.security.AccessControlException;
import javax.jcr.security.AccessControlManager;
import javax.jcr.security.AccessControlPolicy;
import javax.jcr.security.AccessControlPolicyIterator;
import javax.jcr.security.Privilege;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.commons.AbstractSession;
import org.apache.jackrabbit.commons.iterator.AccessControlPolicyIteratorAdapter;
import org.apache.jackrabbit.oak.api.TreeLocation;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.jcr.xml.XmlImportHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.ImpersonationCredentials;
import org.apache.jackrabbit.oak.util.TODO;
import org.apache.jackrabbit.util.Text;
import org.apache.jackrabbit.util.XMLChar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.ContentHandler;
/**
* {@code SessionImpl}...
*/
public class SessionImpl extends AbstractSession implements JackrabbitSession {
/**
* logger instance
*/
private static final Logger log = LoggerFactory.getLogger(SessionImpl.class);
private final SessionDelegate dlg;
/**
* Local namespace remappings. Prefixes as keys and namespace URIs as values.
* <p>
* This map is only accessed from synchronized methods (see
* <a href="https://issues.apache.org/jira/browse/JCR-1793">JCR-1793</a>).
*/
private final Map<String, String> namespaces;
SessionImpl(SessionDelegate dlg, Map<String, String> namespaces) {
this.dlg = dlg;
this.namespaces = namespaces;
}
//------------------------------------------------------------< Session >---
@Override
@Nonnull
public Repository getRepository() {
return dlg.getRepository();
}
@Override
public String getUserID() {
return dlg.getAuthInfo().getUserID();
}
@Override
public String[] getAttributeNames() {
return dlg.getAuthInfo().getAttributeNames();
}
@Override
public Object getAttribute(String name) {
return dlg.getAuthInfo().getAttribute(name);
}
@Override
@Nonnull
public Workspace getWorkspace() {
return dlg.getWorkspace();
}
@Override
@Nonnull
public Session impersonate(Credentials credentials) throws RepositoryException {
ensureIsAlive();
ImpersonationCredentials impCreds = new ImpersonationCredentials(credentials, dlg.getAuthInfo());
return getRepository().login(impCreds, dlg.getWorkspaceName());
}
@Override
@Nonnull
public ValueFactory getValueFactory() throws RepositoryException {
ensureIsAlive();
return dlg.getValueFactory();
}
@Override
@Nonnull
public Node getRootNode() throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<NodeImpl>() {
@Override
public NodeImpl perform() throws AccessDeniedException {
NodeDelegate nd = dlg.getRootNode();
if (nd == null) {
throw new AccessDeniedException("Root node is not accessible.");
} else {
return new NodeImpl<NodeDelegate>(nd);
}
}
});
}
@Override
@Nonnull
public Node getNodeByUUID(String uuid) throws RepositoryException {
return getNodeByIdentifier(uuid);
}
@Override
@Nonnull
public Node getNodeByIdentifier(final String id) throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<NodeImpl>() {
@Override
public NodeImpl perform() throws RepositoryException {
NodeDelegate d = dlg.getNodeByIdentifier(id);
if (d == null) {
throw new ItemNotFoundException("Node with id " + id + " does not exist.");
}
return new NodeImpl<NodeDelegate>(d);
}
});
}
@Override
public Item getItem(String absPath) throws RepositoryException {
if (nodeExists(absPath)) {
return getNode(absPath);
} else {
return getProperty(absPath);
}
}
@Override
public boolean itemExists(String absPath) throws RepositoryException {
return nodeExists(absPath) || propertyExists(absPath);
}
@Override
public Node getNode(final String absPath) throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<NodeImpl>() {
@Override
public NodeImpl perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrow(absPath);
NodeDelegate d = dlg.getNode(oakPath);
if (d == null) {
throw new PathNotFoundException("Node with path " + absPath + " does not exist.");
}
return new NodeImpl<NodeDelegate>(d);
}
});
}
@Override
public boolean nodeExists(final String absPath) throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<Boolean>() {
@Override
public Boolean perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrow(absPath);
return dlg.getNode(oakPath) != null;
}
});
}
@Override
public Property getProperty(final String absPath) throws RepositoryException {
if (absPath.equals("/")) {
throw new RepositoryException("The root node is not a property");
} else {
return dlg.perform(new SessionOperation<PropertyImpl>() {
@Override
public PropertyImpl perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrowNotFound(absPath);
TreeLocation loc = dlg.getLocation(oakPath);
if (loc.getProperty() == null) {
throw new PathNotFoundException(absPath);
}
else {
return new PropertyImpl(new PropertyDelegate(dlg, loc));
}
}
});
}
}
@Override
public boolean propertyExists(final String absPath) throws RepositoryException {
if (absPath.equals("/")) {
throw new RepositoryException("The root node is not a property");
} else {
return dlg.perform(new SessionOperation<Boolean>() {
@Override
public Boolean perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrowNotFound(absPath);
TreeLocation loc = dlg.getLocation(oakPath);
return loc.getProperty() != null;
}
});
}
}
@Override
public void move(final String srcAbsPath, final String destAbsPath) throws RepositoryException {
ensureIsAlive();
dlg.perform(new SessionOperation<Void>() {
@Override
public Void perform() throws RepositoryException {
dlg.checkProtectedNodes(Text.getRelativeParent(srcAbsPath, 1), Text.getRelativeParent(destAbsPath, 1));
String oakPath = dlg.getOakPathKeepIndexOrThrowNotFound(destAbsPath);
String oakName = PathUtils.getName(oakPath);
// handle index
if (oakName.contains("[")) {
throw new RepositoryException("Cannot create a new node using a name including an index");
}
dlg.move(
dlg.getOakPathOrThrowNotFound(srcAbsPath),
dlg.getOakPathOrThrowNotFound(oakPath),
true);
return null;
}
});
}
@Override
public void save() throws RepositoryException {
ensureIsAlive();
dlg.save();
}
@Override
public void refresh(boolean keepChanges) throws RepositoryException {
ensureIsAlive();
dlg.refresh(keepChanges);
}
@Override
public boolean hasPendingChanges() throws RepositoryException {
ensureIsAlive();
return dlg.hasPendingChanges();
}
@Override
public boolean isLive() {
return dlg.isAlive();
}
@Override
public void logout() {
dlg.logout();
synchronized (namespaces) {
namespaces.clear();
}
}
@Override
@Nonnull
public ContentHandler getImportContentHandler(
String parentAbsPath, int uuidBehavior) throws RepositoryException {
final Node parent = getNode(parentAbsPath);
return new XmlImportHandler(parent, uuidBehavior);
}
/**
* @see javax.jcr.Session#addLockToken(String)
*/
@Override
public void addLockToken(String lt) {
try {
dlg.getLockManager().addLockToken(lt);
} catch (RepositoryException e) {
log.warn("Unable to add lock token '{}' to this session: {}", lt, e.getMessage());
}
}
/**
* @see javax.jcr.Session#getLockTokens()
*/
@Override
@Nonnull
public String[] getLockTokens() {
try {
return dlg.getLockManager().getLockTokens();
} catch (RepositoryException e) {
log.warn("Unable to retrieve lock tokens for this session: {}", e.getMessage());
            return new String[0];
        }
}
/**
* @see javax.jcr.Session#removeLockToken(String)
*/
@Override
public void removeLockToken(String lt) {
try {
            dlg.getLockManager().removeLockToken(lt);
        } catch (RepositoryException e) {
            log.warn("Unable to remove lock token '{}' from this session: {}", lt, e.getMessage());
}
}
@Override
public boolean hasPermission(String absPath, String actions) throws RepositoryException {
ensureIsAlive();
String oakPath = dlg.getOakPathOrNull(absPath);
if (oakPath == null) {
// TODO should we throw an exception here?
return TODO.unimplemented().returnValue(false);
}
// TODO implement hasPermission
return TODO.unimplemented().returnValue(true);
}
/**
* @see javax.jcr.Session#checkPermission(String, String)
*/
@Override
public void checkPermission(String absPath, String actions) throws AccessControlException, RepositoryException {
if (!hasPermission(absPath, actions)) {
throw new AccessControlException("Access control violation: path = " + absPath + ", actions = " + actions);
}
}
@Override
public boolean hasCapability(String methodName, Object target, Object[] arguments) throws RepositoryException {
ensureIsAlive();
// TODO
return false;
}
@Override
@Nonnull
public AccessControlManager getAccessControlManager()
throws RepositoryException {
return TODO.unimplemented().returnValue(new AccessControlManager() {
@Override
public void setPolicy(String absPath, AccessControlPolicy policy) throws AccessControlException {
throw new AccessControlException(policy.toString());
}
@Override
public void removePolicy(String absPath, AccessControlPolicy policy) throws AccessControlException {
throw new AccessControlException(policy.toString());
}
@Override
public Privilege privilegeFromName(String privilegeName)
throws AccessControlException, RepositoryException {
return dlg.getPrivilegeManager().getPrivilege(privilegeName);
}
@Override
public boolean hasPrivileges(String absPath, Privilege[] privileges) {
return true;
}
@Override
public Privilege[] getSupportedPrivileges(String absPath) {
return new Privilege[0];
}
@Override
public Privilege[] getPrivileges(String absPath) {
return new Privilege[0];
}
@Override
public AccessControlPolicy[] getPolicies(String absPath) {
return new AccessControlPolicy[0];
}
@Override
public AccessControlPolicy[] getEffectivePolicies(String absPath) {
return new AccessControlPolicy[0];
}
@Override
public AccessControlPolicyIterator getApplicablePolicies(String absPath) {
return AccessControlPolicyIteratorAdapter.EMPTY;
}
});
}
/**
* @see javax.jcr.Session#getRetentionManager()
*/
@Override
@Nonnull
public RetentionManager getRetentionManager() throws RepositoryException {
throw new UnsupportedRepositoryOperationException("Retention Management is not supported.");
}
//---------------------------------------------------------< Namespaces >---
// The code below was initially copied from JCR Commons AbstractSession, but
    // additionally provides the "hasRemappings" information
@Override
public void setNamespacePrefix(String prefix, String uri) throws RepositoryException {
if (prefix == null) {
throw new IllegalArgumentException("Prefix must not be null");
} else if (uri == null) {
throw new IllegalArgumentException("Namespace must not be null");
} else if (prefix.isEmpty()) {
throw new NamespaceException(
"Empty prefix is reserved and can not be remapped");
} else if (uri.isEmpty()) {
throw new NamespaceException(
"Default namespace is reserved and can not be remapped");
} else if (prefix.toLowerCase(Locale.ENGLISH).startsWith("xml")) {
throw new NamespaceException(
"XML prefixes are reserved: " + prefix);
} else if (!XMLChar.isValidNCName(prefix)) {
throw new NamespaceException(
"Prefix is not a valid XML NCName: " + prefix);
}
synchronized (namespaces) {
// Remove existing mapping for the given prefix
namespaces.remove(prefix);
// Remove existing mapping(s) for the given URI
Set<String> prefixes = new HashSet<String>();
for (Map.Entry<String, String> entry : namespaces.entrySet()) {
if (entry.getValue().equals(uri)) {
prefixes.add(entry.getKey());
}
}
namespaces.keySet().removeAll(prefixes);
// Add the new mapping
namespaces.put(prefix, uri);
}
}
@Override
public String[] getNamespacePrefixes() throws RepositoryException {
Set<String> uris = new HashSet<String>();
uris.addAll(Arrays.asList(getWorkspace().getNamespaceRegistry().getURIs()));
synchronized (namespaces) {
// Add namespace uris only visible to session
uris.addAll(namespaces.values());
}
Set<String> prefixes = new HashSet<String>();
for (String uri : uris) {
prefixes.add(getNamespacePrefix(uri));
}
return prefixes.toArray(new String[prefixes.size()]);
}
@Override
public String getNamespaceURI(String prefix) throws RepositoryException {
synchronized (namespaces) {
String uri = namespaces.get(prefix);
if (uri == null) {
// Not in local mappings, try the global ones
uri = getWorkspace().getNamespaceRegistry().getURI(prefix);
if (namespaces.containsValue(uri)) {
// The global URI is locally mapped to some other prefix,
// so there are no mappings for this prefix
throw new NamespaceException("Namespace not found: " + prefix);
}
}
return uri;
}
}
@Override
public String getNamespacePrefix(String uri) throws RepositoryException {
synchronized (namespaces) {
for (Map.Entry<String, String> entry : namespaces.entrySet()) {
if (entry.getValue().equals(uri)) {
return entry.getKey();
}
}
// The following throws an exception if the URI is not found, that's OK
String prefix = getWorkspace().getNamespaceRegistry().getPrefix(uri);
// Generate a new prefix if the global mapping is already taken
String base = prefix;
for (int i = 2; namespaces.containsKey(prefix); i++) {
prefix = base + i;
}
if (!base.equals(prefix)) {
namespaces.put(prefix, uri);
}
return prefix;
}
}
//--------------------------------------------------< JackrabbitSession >---
@Override
@Nonnull
public PrincipalManager getPrincipalManager() throws RepositoryException {
return dlg.getPrincipalManager();
}
@Override
@Nonnull
public UserManager getUserManager() throws RepositoryException {
return dlg.getUserManager();
}
//------------------------------------------------------------< private >---
/**
* Ensure that this session is alive and throw an exception otherwise.
*
* @throws RepositoryException if this session has been rendered invalid
* for some reason (e.g. if this session has been closed explicitly by logout)
*/
private void ensureIsAlive() throws RepositoryException {
// check session status
if (!dlg.isAlive()) {
throw new RepositoryException("This session has been closed.");
}
}
}
|
oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/SessionImpl.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.jcr.AccessDeniedException;
import javax.jcr.Credentials;
import javax.jcr.Item;
import javax.jcr.ItemNotFoundException;
import javax.jcr.NamespaceException;
import javax.jcr.Node;
import javax.jcr.PathNotFoundException;
import javax.jcr.Property;
import javax.jcr.Repository;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.UnsupportedRepositoryOperationException;
import javax.jcr.ValueFactory;
import javax.jcr.Workspace;
import javax.jcr.retention.RetentionManager;
import javax.jcr.security.AccessControlException;
import javax.jcr.security.AccessControlManager;
import javax.jcr.security.AccessControlPolicy;
import javax.jcr.security.AccessControlPolicyIterator;
import javax.jcr.security.Privilege;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.principal.PrincipalManager;
import org.apache.jackrabbit.api.security.user.UserManager;
import org.apache.jackrabbit.commons.AbstractSession;
import org.apache.jackrabbit.commons.iterator.AccessControlPolicyIteratorAdapter;
import org.apache.jackrabbit.oak.api.TreeLocation;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.jcr.xml.XmlImportHandler;
import org.apache.jackrabbit.oak.spi.security.authentication.ImpersonationCredentials;
import org.apache.jackrabbit.oak.util.TODO;
import org.apache.jackrabbit.util.Text;
import org.apache.jackrabbit.util.XMLChar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.ContentHandler;
/**
* {@code SessionImpl}...
*/
public class SessionImpl extends AbstractSession implements JackrabbitSession {
/**
* logger instance
*/
private static final Logger log = LoggerFactory.getLogger(SessionImpl.class);
private final SessionDelegate dlg;
/**
* Local namespace remappings. Prefixes as keys and namespace URIs as values.
* <p>
* This map is only accessed from synchronized methods (see
* <a href="https://issues.apache.org/jira/browse/JCR-1793">JCR-1793</a>).
*/
private final Map<String, String> namespaces;
SessionImpl(SessionDelegate dlg, Map<String, String> namespaces) {
this.dlg = dlg;
this.namespaces = namespaces;
}
//------------------------------------------------------------< Session >---
@Override
@Nonnull
public Repository getRepository() {
return dlg.getRepository();
}
@Override
public String getUserID() {
return dlg.getAuthInfo().getUserID();
}
@Override
public String[] getAttributeNames() {
return dlg.getAuthInfo().getAttributeNames();
}
@Override
public Object getAttribute(String name) {
return dlg.getAuthInfo().getAttribute(name);
}
@Override
@Nonnull
public Workspace getWorkspace() {
return dlg.getWorkspace();
}
@Override
@Nonnull
public Session impersonate(Credentials credentials) throws RepositoryException {
ensureIsAlive();
ImpersonationCredentials impCreds = new ImpersonationCredentials(credentials, dlg.getAuthInfo());
return getRepository().login(impCreds, dlg.getWorkspaceName());
}
@Override
@Nonnull
public ValueFactory getValueFactory() throws RepositoryException {
ensureIsAlive();
return dlg.getValueFactory();
}
@Override
@Nonnull
public Node getRootNode() throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<NodeImpl>() {
@Override
public NodeImpl perform() throws AccessDeniedException {
NodeDelegate nd = dlg.getRootNode();
if (nd == null) {
throw new AccessDeniedException("Root node is not accessible.");
} else {
return new NodeImpl<NodeDelegate>(nd);
}
}
});
}
@Override
@Nonnull
public Node getNodeByUUID(String uuid) throws RepositoryException {
return getNodeByIdentifier(uuid);
}
@Override
@Nonnull
public Node getNodeByIdentifier(final String id) throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<NodeImpl>() {
@Override
public NodeImpl perform() throws RepositoryException {
NodeDelegate d = dlg.getNodeByIdentifier(id);
if (d == null) {
throw new ItemNotFoundException("Node with id " + id + " does not exist.");
}
return new NodeImpl<NodeDelegate>(d);
}
});
}
@Override
public Item getItem(String absPath) throws RepositoryException {
if (nodeExists(absPath)) {
return getNode(absPath);
} else {
return getProperty(absPath);
}
}
@Override
public boolean itemExists(String absPath) throws RepositoryException {
return nodeExists(absPath) || propertyExists(absPath);
}
@Override
public Node getNode(final String absPath) throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<NodeImpl>() {
@Override
public NodeImpl perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrow(absPath);
NodeDelegate d = dlg.getNode(oakPath);
if (d == null) {
throw new PathNotFoundException("Node with path " + absPath + " does not exist.");
}
return new NodeImpl<NodeDelegate>(d);
}
});
}
@Override
public boolean nodeExists(final String absPath) throws RepositoryException {
ensureIsAlive();
return dlg.perform(new SessionOperation<Boolean>() {
@Override
public Boolean perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrow(absPath);
return dlg.getNode(oakPath) != null;
}
});
}
@Override
public Property getProperty(final String absPath) throws RepositoryException {
if (absPath.equals("/")) {
throw new RepositoryException("The root node is not a property");
} else {
return dlg.perform(new SessionOperation<PropertyImpl>() {
@Override
public PropertyImpl perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrowNotFound(absPath);
TreeLocation loc = dlg.getLocation(oakPath);
if (loc.getProperty() == null) {
throw new PathNotFoundException(absPath);
}
else {
return new PropertyImpl(new PropertyDelegate(dlg, loc));
}
}
});
}
}
@Override
public boolean propertyExists(final String absPath) throws RepositoryException {
if (absPath.equals("/")) {
throw new RepositoryException("The root node is not a property");
} else {
return dlg.perform(new SessionOperation<Boolean>() {
@Override
public Boolean perform() throws RepositoryException {
String oakPath = dlg.getOakPathOrThrowNotFound(absPath);
TreeLocation loc = dlg.getLocation(oakPath);
return loc.getProperty() != null;
}
});
}
}
@Override
public void move(final String srcAbsPath, final String destAbsPath) throws RepositoryException {
ensureIsAlive();
dlg.perform(new SessionOperation<Void>() {
@Override
public Void perform() throws RepositoryException {
dlg.checkProtectedNodes(Text.getRelativeParent(srcAbsPath, 1), Text.getRelativeParent(destAbsPath, 1));
String oakPath = dlg.getOakPathKeepIndexOrThrowNotFound(destAbsPath);
String oakName = PathUtils.getName(oakPath);
// handle index
if (oakName.contains("[")) {
throw new RepositoryException("Cannot create a new node using a name including an index");
}
dlg.move(
dlg.getOakPathOrThrowNotFound(srcAbsPath),
dlg.getOakPathOrThrowNotFound(oakPath),
true);
return null;
}
});
}
@Override
public void save() throws RepositoryException {
ensureIsAlive();
dlg.save();
}
@Override
public void refresh(boolean keepChanges) throws RepositoryException {
ensureIsAlive();
dlg.refresh(keepChanges);
}
@Override
public boolean hasPendingChanges() throws RepositoryException {
ensureIsAlive();
return dlg.hasPendingChanges();
}
@Override
public boolean isLive() {
return dlg.isAlive();
}
@Override
public void logout() {
dlg.logout();
synchronized (namespaces) {
namespaces.clear();
}
}
@Override
@Nonnull
public ContentHandler getImportContentHandler(
String parentAbsPath, int uuidBehavior) throws RepositoryException {
final Node parent = getNode(parentAbsPath);
return new XmlImportHandler(parent, uuidBehavior);
}
/**
* @see javax.jcr.Session#addLockToken(String)
*/
@Override
public void addLockToken(String lt) {
try {
dlg.getLockManager().addLockToken(lt);
} catch (RepositoryException e) {
log.warn("Unable to add lock token '{}' to this session: {}", lt, e.getMessage());
}
}
/**
* @see javax.jcr.Session#getLockTokens()
*/
@Override
@Nonnull
public String[] getLockTokens() {
try {
return dlg.getLockManager().getLockTokens();
} catch (RepositoryException e) {
log.warn("Unable to retrieve lock tokens for this session: {}", e.getMessage());
            return new String[0];
        }
}
/**
* @see javax.jcr.Session#removeLockToken(String)
*/
@Override
public void removeLockToken(String lt) {
try {
            dlg.getLockManager().removeLockToken(lt);
        } catch (RepositoryException e) {
            log.warn("Unable to remove lock token '{}' from this session: {}", lt, e.getMessage());
}
}
@Override
public boolean hasPermission(String absPath, String actions) throws RepositoryException {
ensureIsAlive();
String oakPath = dlg.getOakPathOrNull(absPath);
if (oakPath == null) {
// TODO should we throw an exception here?
return TODO.unimplemented().returnValue(false);
}
// TODO implement hasPermission
return TODO.unimplemented().returnValue(true);
}
/**
* @see javax.jcr.Session#checkPermission(String, String)
*/
@Override
public void checkPermission(String absPath, String actions) throws AccessControlException, RepositoryException {
if (!hasPermission(absPath, actions)) {
throw new AccessControlException("Access control violation: path = " + absPath + ", actions = " + actions);
}
}
@Override
public boolean hasCapability(String methodName, Object target, Object[] arguments) throws RepositoryException {
ensureIsAlive();
// TODO
return false;
}
@Override
@Nonnull
public AccessControlManager getAccessControlManager()
throws RepositoryException {
return TODO.unimplemented().returnValue(new AccessControlManager() {
@Override
public void setPolicy(String absPath, AccessControlPolicy policy) throws AccessControlException {
throw new AccessControlException(policy.toString());
}
@Override
public void removePolicy(String absPath, AccessControlPolicy policy) throws AccessControlException {
throw new AccessControlException(policy.toString());
}
@Override
public Privilege privilegeFromName(String privilegeName)
throws AccessControlException, RepositoryException {
return dlg.getPrivilegeManager().getPrivilege(privilegeName);
}
@Override
public boolean hasPrivileges(String absPath, Privilege[] privileges) {
return true;
}
@Override
public Privilege[] getSupportedPrivileges(String absPath) {
return new Privilege[0];
}
@Override
public Privilege[] getPrivileges(String absPath) {
return new Privilege[0];
}
@Override
public AccessControlPolicy[] getPolicies(String absPath) {
return new AccessControlPolicy[0];
}
@Override
public AccessControlPolicy[] getEffectivePolicies(String absPath) {
return new AccessControlPolicy[0];
}
@Override
public AccessControlPolicyIterator getApplicablePolicies(String absPath) {
return AccessControlPolicyIteratorAdapter.EMPTY;
}
});
}
/**
* @see javax.jcr.Session#getRetentionManager()
*/
@Override
@Nonnull
public RetentionManager getRetentionManager() throws RepositoryException {
throw new UnsupportedRepositoryOperationException("Retention Management is not supported.");
}
//---------------------------------------------------------< Namespaces >---
// The code below was initially copied from JCR Commons AbstractSession, but
    // additionally provides the "hasRemappings" information
@Override
public void setNamespacePrefix(String prefix, String uri) throws RepositoryException {
if (prefix == null) {
throw new IllegalArgumentException("Prefix must not be null");
} else if (uri == null) {
throw new IllegalArgumentException("Namespace must not be null");
} else if (prefix.isEmpty()) {
throw new NamespaceException(
"Empty prefix is reserved and can not be remapped");
} else if (uri.isEmpty()) {
throw new NamespaceException(
"Default namespace is reserved and can not be remapped");
} else if (prefix.toLowerCase(Locale.ENGLISH).startsWith("xml")) {
throw new NamespaceException(
"XML prefixes are reserved: " + prefix);
} else if (!XMLChar.isValidNCName(prefix)) {
throw new NamespaceException(
"Prefix is not a valid XML NCName: " + prefix);
}
synchronized (namespaces) {
// Remove existing mapping for the given prefix
namespaces.remove(prefix);
// Remove existing mapping(s) for the given URI
Set<String> prefixes = new HashSet<String>();
for (Map.Entry<String, String> entry : namespaces.entrySet()) {
if (entry.getValue().equals(uri)) {
prefixes.add(entry.getKey());
}
}
namespaces.keySet().removeAll(prefixes);
// Add the new mapping
namespaces.put(prefix, uri);
}
}
@Override
public String[] getNamespacePrefixes() throws RepositoryException {
Set<String> uris = new HashSet<String>();
uris.addAll(Arrays.asList(getWorkspace().getNamespaceRegistry().getURIs()));
synchronized (namespaces) {
// Add namespace uris only visible to session
uris.addAll(namespaces.values());
}
Set<String> prefixes = new HashSet<String>();
for (String uri : uris) {
prefixes.add(getNamespacePrefix(uri));
}
return prefixes.toArray(new String[prefixes.size()]);
}
@Override
public String getNamespaceURI(String prefix) throws RepositoryException {
synchronized (namespaces) {
String uri = namespaces.get(prefix);
if (uri == null) {
// Not in local mappings, try the global ones
uri = getWorkspace().getNamespaceRegistry().getURI(prefix);
if (namespaces.containsValue(uri)) {
// The global URI is locally mapped to some other prefix,
// so there are no mappings for this prefix
throw new NamespaceException("Namespace not found: " + prefix);
}
}
return uri;
}
}
@Override
public String getNamespacePrefix(String uri) throws RepositoryException {
synchronized (namespaces) {
for (Map.Entry<String, String> entry : namespaces.entrySet()) {
if (entry.getValue().equals(uri)) {
return entry.getKey();
}
}
// The following throws an exception if the URI is not found, that's OK
String prefix = getWorkspace().getNamespaceRegistry().getPrefix(uri);
// Generate a new prefix if the global mapping is already taken
String base = prefix;
for (int i = 2; namespaces.containsKey(prefix); i++) {
prefix = base + i;
}
if (!base.equals(prefix)) {
namespaces.put(prefix, uri);
}
return prefix;
}
}
boolean hasSessionLocalMappings() {
return !namespaces.isEmpty();
}
//--------------------------------------------------< JackrabbitSession >---
@Override
@Nonnull
public PrincipalManager getPrincipalManager() throws RepositoryException {
return dlg.getPrincipalManager();
}
@Override
@Nonnull
public UserManager getUserManager() throws RepositoryException {
return dlg.getUserManager();
}
//------------------------------------------------------------< private >---
/**
* Ensure that this session is alive and throw an exception otherwise.
*
* @throws RepositoryException if this session has been rendered invalid
* for some reason (e.g. if this session has been closed explicitly by logout)
*/
private void ensureIsAlive() throws RepositoryException {
// check session status
if (!dlg.isAlive()) {
throw new RepositoryException("This session has been closed.");
}
}
}
|
minor improvement: remove unused method
git-svn-id: 67138be12999c61558c3dd34328380c8e4523e73@1421459 13f79535-47bb-0310-9956-ffa450edef68
|
oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/SessionImpl.java
|
minor improvement: remove unused method
|
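The unused method dropped by this commit is the package-private helper reproduced below from the old version of SessionImpl; per the subject ("remove unused method") it no longer had any callers.

    // Removed in the new version of SessionImpl: reported whether the session
    // still held any local prefix-to-URI remappings in the namespaces map.
    boolean hasSessionLocalMappings() {
        return !namespaces.isEmpty();
    }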
|
Java
|
apache-2.0
|
9230f9074aa38194f92728a39647aa3abd6bc9e8
| 0
|
folio-org/okapi,folio-org/okapi
|
package org.folio.okapi.service.impl;
import io.vertx.core.Handler;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import io.vertx.ext.mongo.MongoClient;
import io.vertx.ext.mongo.UpdateOptions;
import java.util.LinkedList;
import java.util.List;
import static org.folio.okapi.common.ErrorType.INTERNAL;
import static org.folio.okapi.common.ErrorType.NOT_FOUND;
import org.folio.okapi.common.ExtendedAsyncResult;
import org.folio.okapi.common.Failure;
import org.folio.okapi.common.OkapiLogger;
import org.folio.okapi.common.Success;
@java.lang.SuppressWarnings({"squid:S1192"})
class MongoUtil<T> {
private final String collection;
private final MongoClient cli;
private Logger logger = OkapiLogger.get();
public MongoUtil(String collection, MongoClient cli) {
this.collection = collection;
this.cli = cli;
}
public void delete(String id, Handler<ExtendedAsyncResult<Void>> fut) {
JsonObject jq = new JsonObject().put("_id", id);
cli.removeDocument(collection, jq, rres -> {
if (rres.failed()) {
logger.warn("MongoUtil.delete " + id + " failed : " + rres.cause());
fut.handle(new Failure<>(INTERNAL, rres.cause()));
} else if (rres.result().getRemovedCount() == 0) {
fut.handle(new Failure<>(NOT_FOUND, id));
} else {
fut.handle(new Success<>());
}
});
}
public void init(boolean reset, Handler<ExtendedAsyncResult<Void>> fut) {
if (!reset) {
fut.handle(new Success<>());
} else {
cli.dropCollection(collection, res -> {
if (res.failed()) {
fut.handle(new Failure<>(INTERNAL, res.cause()));
} else {
fut.handle(new Success<>());
}
});
}
}
public void add(T env, String id, Handler<ExtendedAsyncResult<Void>> fut) {
JsonObject jq = new JsonObject().put("_id", id);
String s = Json.encodePrettily(env);
JsonObject document = new JsonObject(s);
encode(document, null); // _id can not be put for Vert.x 3.5.1
UpdateOptions options = new UpdateOptions().setUpsert(true);
cli.updateCollectionWithOptions(collection, jq, new JsonObject().put("$set", document), options, res -> {
if (res.succeeded()) {
fut.handle(new Success<>());
} else {
logger.warn("MongoUtil.add " + id + " failed : " + res.cause());
logger.warn("Document: " + document.encodePrettily());
fut.handle(new Failure<>(INTERNAL, res.cause()));
}
});
}
public void insert(T md, String id, Handler<ExtendedAsyncResult<Void>> fut) {
String s = Json.encodePrettily(md);
JsonObject document = new JsonObject(s);
encode(document, id);
cli.insert(collection, document, res -> {
if (res.succeeded()) {
fut.handle(new Success<>());
} else {
logger.warn("MongoUtil.insert " + id + " failed : " + res.cause());
logger.warn("Document: " + document.encodePrettily());
fut.handle(new Failure<>(INTERNAL, res.cause()));
}
});
}
public void getAll(Class<T> clazz, Handler<ExtendedAsyncResult<List<T>>> fut) {
final String q = "{}";
JsonObject jq = new JsonObject(q);
cli.find(collection, jq, res -> {
if (res.failed()) {
fut.handle(new Failure<>(INTERNAL, res.cause()));
} else {
List<JsonObject> resl = res.result();
List<T> ml = new LinkedList<>();
for (JsonObject jo : resl) {
decode(jo);
T env = Json.decodeValue(jo.encode(), clazz);
ml.add(env);
}
fut.handle(new Success<>(ml));
}
});
}
public void encode(JsonObject j, String id) {
if (id != null) {
j.put("_id", id);
}
JsonObject o = j.getJsonObject("enabled");
if (o != null) {
JsonObject repl = new JsonObject();
for (String m : o.fieldNames()) {
String n = m.replace(".", "__");
repl.put(n, o.getBoolean(m));
}
j.put("enabled", repl);
}
}
public void decode(JsonObject j) {
j.remove("_id");
JsonObject o = j.getJsonObject("enabled");
if (o != null) {
JsonObject repl = new JsonObject();
for (String m : o.fieldNames()) {
if (m.contains("_")) {
String n = m.replace("__", ".");
repl.put(n, o.getBoolean(m));
}
}
j.put("enabled", repl);
}
}
}
|
okapi-core/src/main/java/org/folio/okapi/service/impl/MongoUtil.java
|
package org.folio.okapi.service.impl;
import io.vertx.core.Handler;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;
import io.vertx.core.logging.Logger;
import io.vertx.ext.mongo.MongoClient;
import io.vertx.ext.mongo.UpdateOptions;
import java.util.LinkedList;
import java.util.List;
import static org.folio.okapi.common.ErrorType.INTERNAL;
import static org.folio.okapi.common.ErrorType.NOT_FOUND;
import org.folio.okapi.common.ExtendedAsyncResult;
import org.folio.okapi.common.Failure;
import org.folio.okapi.common.OkapiLogger;
import org.folio.okapi.common.Success;
@java.lang.SuppressWarnings({"squid:S1192"})
class MongoUtil<T> {
private final String collection;
private final MongoClient cli;
private Logger logger = OkapiLogger.get();
public MongoUtil(String collection, MongoClient cli) {
this.collection = collection;
this.cli = cli;
}
public void delete(String id, Handler<ExtendedAsyncResult<Void>> fut) {
JsonObject jq = new JsonObject().put("_id", id);
cli.removeDocument(collection, jq, rres -> {
if (rres.failed()) {
logger.warn("MongoUtil.delete " + id + " failed : " + rres.cause());
fut.handle(new Failure<>(INTERNAL, rres.cause()));
} else if (rres.result().getRemovedCount() == 0) {
fut.handle(new Failure<>(NOT_FOUND, id));
} else {
fut.handle(new Success<>());
}
});
}
public void init(boolean reset, Handler<ExtendedAsyncResult<Void>> fut) {
if (!reset) {
fut.handle(new Success<>());
} else {
cli.dropCollection(collection, res -> {
if (res.failed()) {
fut.handle(new Failure<>(INTERNAL, res.cause()));
} else {
fut.handle(new Success<>());
}
});
}
}
public void add(T env, String id, Handler<ExtendedAsyncResult<Void>> fut) {
JsonObject jq = new JsonObject().put("_id", id);
String s = Json.encodePrettily(env);
JsonObject document = new JsonObject(s);
encode(document, id);
UpdateOptions options = new UpdateOptions().setUpsert(true);
cli.updateCollectionWithOptions(collection, jq, new JsonObject().put("$set", document), options, res -> {
if (res.succeeded()) {
fut.handle(new Success<>());
} else {
logger.warn("MongoUtil.add " + id + " failed : " + res.cause());
logger.warn("Document: " + document.encodePrettily());
fut.handle(new Failure<>(INTERNAL, res.cause()));
}
});
}
public void insert(T md, String id, Handler<ExtendedAsyncResult<Void>> fut) {
String s = Json.encodePrettily(md);
JsonObject document = new JsonObject(s);
encode(document, id);
document.put("_id", id);
cli.insert(collection, document, res -> {
if (res.succeeded()) {
fut.handle(new Success<>());
} else {
logger.warn("MongoUtil.insert " + id + " failed : " + res.cause());
logger.warn("Document: " + document.encodePrettily());
fut.handle(new Failure<>(INTERNAL, res.cause()));
}
});
}
public void getAll(Class<T> clazz, Handler<ExtendedAsyncResult<List<T>>> fut) {
final String q = "{}";
JsonObject jq = new JsonObject(q);
cli.find(collection, jq, res -> {
if (res.failed()) {
fut.handle(new Failure<>(INTERNAL, res.cause()));
} else {
List<JsonObject> resl = res.result();
List<T> ml = new LinkedList<>();
for (JsonObject jo : resl) {
decode(jo);
T env = Json.decodeValue(jo.encode(), clazz);
ml.add(env);
}
fut.handle(new Success<>(ml));
}
});
}
public void encode(JsonObject j, String id) {
JsonObject o = j.getJsonObject("enabled");
if (o != null) {
JsonObject repl = new JsonObject();
for (String m : o.fieldNames()) {
String n = m.replace(".", "__");
repl.put(n, o.getBoolean(m));
}
j.put("enabled", repl);
}
}
public void decode(JsonObject j) {
j.remove("_id");
JsonObject o = j.getJsonObject("enabled");
if (o != null) {
JsonObject repl = new JsonObject();
for (String m : o.fieldNames()) {
if (m.contains("_")) {
String n = m.replace("__", ".");
repl.put(n, o.getBoolean(m));
}
}
j.put("enabled", repl);
}
}
}
|
Refactor for Vert.x and _id member Mongo OKAPI-605
|
okapi-core/src/main/java/org/folio/okapi/service/impl/MongoUtil.java
|
Refactor for Vert.x and _id member Mongo OKAPI-605
|
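The substance of this refactor is the handling of the Mongo _id member: encode() now receives the id and writes it itself, insert() relies on that instead of a separate put(), and add() passes null because, as the inline comment notes, _id cannot be placed in the $set payload on Vert.x 3.5.1. A minimal sketch of that split, written for illustration only (the class name is invented) and using nothing beyond the Vert.x JsonObject API:

    import io.vertx.core.json.JsonObject;

    // Hypothetical illustration of the _id split introduced by this commit.
    class MongoIdSketch {
        // mirrors add(): encode(document, null) keeps _id out of the $set payload
        static JsonObject forUpsert(JsonObject document) {
            return new JsonObject().put("$set", document);
        }
        // mirrors insert(): encode(document, id) now writes _id into the document
        static JsonObject forInsert(JsonObject document, String id) {
            return document.put("_id", id);
        }
    }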
|
Java
|
apache-2.0
|
6a3349d387d422a9a4a56ea4e1d194c0ca46f847
| 0
|
d3sw/conductor,d3sw/conductor,d3sw/conductor,d3sw/conductor,d3sw/conductor,d3sw/conductor
|
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.conductor.common.run;
import com.netflix.conductor.common.metadata.Auditable;
import com.netflix.conductor.common.metadata.tasks.Task;
import java.util.*;
public class Workflow extends Auditable {
public enum WorkflowStatus {
RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), CANCELLED(true, false), PAUSED(false, true), RESET(true, false);
private boolean terminal;
private boolean successful;
WorkflowStatus(boolean terminal, boolean successful){
this.terminal = terminal;
this.successful = successful;
}
public boolean isTerminal(){
return terminal;
}
public boolean isSuccessful(){
return successful;
}
}
private long revision = 1;
private WorkflowStatus status = WorkflowStatus.RUNNING;
private long endTime;
private String workflowId;
private String parentWorkflowId;
private String parentWorkflowTaskId;
private List<Task> tasks = new LinkedList<>();
private Map<String, Object> input = new HashMap<>();
    private Map<String, Object> output = new HashMap<>();
    private Map<String, Object> attributes = new HashMap<>();
private String workflowType;
private int version;
private String correlationId;
private String reRunFromWorkflowId;
private String reasonForIncompletion;
private int schemaVersion;
private String event;
private Map<String, String> taskToDomain = new HashMap<>();
private List<String> workflowIds = new ArrayList<>();
private Map<String, Object> authorization = new HashMap<>();
private String contextToken;
private String contextUser;
private String clientId;
private Set<String> tags = new HashSet<>();
private int restartCount;
private int rerunCount;
private String cancelledBy;
private String traceId;
private boolean resetTags;
private int jobPriority = 5;
public Workflow(){
}
/**
* @return the status
*/
public WorkflowStatus getStatus() {
return status;
}
/**
* @param status the status to set
*/
public void setStatus(WorkflowStatus status) {
this.status = status;
}
/**
* @return the startTime
*/
public long getStartTime() {
return getCreateTime();
}
/**
* @param startTime the startTime to set
*/
public void setStartTime(long startTime) {
this.setCreateTime(startTime);
}
/**
* @return the endTime
*/
public long getEndTime() {
return endTime;
}
/**
* @param endTime the endTime to set
*/
public void setEndTime(long endTime) {
this.endTime = endTime;
}
/**
* @return the duration of the workflow
*/
public long getDuration() {
return getEndTime() - getStartTime();
}
/**
* @return the workflowId
*/
public String getWorkflowId() {
return workflowId;
}
/**
* @param workflowId the workflowId to set
*/
public void setWorkflowId(String workflowId) {
this.workflowId = workflowId;
}
/**
* @return the tasks which are scheduled, in progress or completed.
*/
public List<Task> getTasks() {
return tasks;
}
/**
* @param tasks the tasks to set
*/
public void setTasks(List<Task> tasks) {
this.tasks = tasks;
}
/**
* @return the input
*/
public Map<String, Object> getInput() {
return input;
}
/**
* @param input the input to set
*/
public void setInput(Map<String, Object> input) {
this.input = input;
}
/**
* @return the task to domain map
*/
public Map<String, String> getTaskToDomain() {
return taskToDomain;
}
/**
* @param taskToDomain the task to domain map
*/
public void setTaskToDomain(Map<String, String> taskToDomain) {
this.taskToDomain = taskToDomain;
}
/**
* @return the output
*/
public Map<String, Object> getOutput() {
return output;
}
/**
* @param output the output to set
*/
public void setOutput(Map<String, Object> output) {
this.output = output;
}
/**
*
* @return The correlation id used when starting the workflow
*/
public String getCorrelationId() {
return correlationId;
}
/**
*
* @param correlationId the correlation id
*/
public void setCorrelationId(String correlationId) {
this.correlationId = correlationId;
}
/**
*
* @return Workflow Type / Definition
*/
public String getWorkflowType() {
return workflowType;
}
/**
*
* @param workflowType Workflow type
*/
public void setWorkflowType(String workflowType) {
this.workflowType = workflowType;
}
/**
* @return the version
*/
public int getVersion() {
return version;
}
/**
* @param version the version to set
*/
public void setVersion(int version) {
this.version = version;
}
public String getReRunFromWorkflowId() {
return reRunFromWorkflowId;
}
public void setReRunFromWorkflowId(String reRunFromWorkflowId) {
this.reRunFromWorkflowId = reRunFromWorkflowId;
}
public String getReasonForIncompletion() {
return reasonForIncompletion;
}
public void setReasonForIncompletion(String reasonForIncompletion) {
this.reasonForIncompletion = reasonForIncompletion;
}
/**
* @return the parentWorkflowId
*/
public String getParentWorkflowId() {
return parentWorkflowId;
}
/**
* @param parentWorkflowId the parentWorkflowId to set
*/
public void setParentWorkflowId(String parentWorkflowId) {
this.parentWorkflowId = parentWorkflowId;
}
/**
* @return whether this workflow is a sub-workflow.
*/
public boolean isSubWorkflow() {
final String parentId = getParentWorkflowId();
        return parentId != null && !parentId.isEmpty();
}
/**
* @return the parentWorkflowTaskId
*/
public String getParentWorkflowTaskId() {
return parentWorkflowTaskId;
}
/**
* @param parentWorkflowTaskId the parentWorkflowTaskId to set
*/
public void setParentWorkflowTaskId(String parentWorkflowTaskId) {
this.parentWorkflowTaskId = parentWorkflowTaskId;
}
/**
* @return the schemaVersion Version of the schema for the workflow definition
*/
public int getSchemaVersion() {
return schemaVersion;
}
/**
* @param schemaVersion the schemaVersion to set
*/
public void setSchemaVersion(int schemaVersion) {
this.schemaVersion = schemaVersion;
}
/**
*
* @return Name of the event that started the workflow
*/
public String getEvent() {
return event;
}
/**
*
* @param event Name of the event that started the workflow
*/
public void setEvent(String event) {
this.event = event;
}
public List<String> getWorkflowIds() {
return workflowIds;
}
public void setWorkflowIds(List<String> workflowIds) {
this.workflowIds = workflowIds;
}
public Map<String, Object> getAuthorization() {
return authorization;
}
public void setAuthorization(Map<String, Object> authorization) {
this.authorization = authorization;
}
public String getContextToken() {
return contextToken;
}
public void setContextToken(String contextToken) {
this.contextToken = contextToken;
}
public String getContextUser() {
return contextUser;
}
public void setContextUser(String contextUser) {
this.contextUser = contextUser;
}
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public Set<String> getTags() {
return tags;
}
public void setTags(Set<String> tags) {
this.tags = tags;
}
public int getRestartCount() {
return restartCount;
}
public void setRestartCount(int restartCount) {
this.restartCount = restartCount;
}
public void incRestartCount() {
this.restartCount++;
}
public String getCancelledBy() {
return cancelledBy;
}
public void setCancelledBy(String cancelledBy) {
this.cancelledBy = cancelledBy;
}
public int getRerunCount() {
return rerunCount;
}
public void setRerunCount(int rerunCount) {
this.rerunCount = rerunCount;
}
public void incRerunCount() {
this.rerunCount++;
}
public String getTraceId() {
return traceId;
}
public void setTraceId(String traceId) {
this.traceId = traceId;
}
public int getJobPriority() {
return jobPriority;
}
public void setJobPriority(int jobPriority) {
this.jobPriority = jobPriority;
}
public Map<String, Object> getAttributes() {
return attributes;
}
public void setAttributes(Map<String, Object> attributes) {
this.attributes = attributes;
}
public boolean getResetTags() {
return resetTags;
}
public void setResetTags(boolean resetTags) {
this.resetTags = resetTags;
}
@Override
public String toString() {
return workflowType + "." + version + "/" + workflowId + "." + status;
}
public Task getTaskByRefName(String refName) {
if (refName == null) {
throw new RuntimeException("refName passed is null. Check the workflow execution. For dynamic tasks, make sure referenceTaskName is set to a not null value");
}
LinkedList<Task> found = new LinkedList<Task>();
for (Task t : tasks) {
if (t.getReferenceTaskName() == null) {
throw new RuntimeException("Task " + t.getTaskDefName() + ", seq=" + t.getSeq() + " does not have reference name specified.");
}
if (t.getReferenceTaskName().equals(refName)) {
found.add(t);
}
}
if (found.isEmpty()) {
return null;
}
return found.getLast();
}
}
|
common/src/main/java/com/netflix/conductor/common/run/Workflow.java
|
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.conductor.common.run;
import com.netflix.conductor.common.metadata.Auditable;
import com.netflix.conductor.common.metadata.tasks.Task;
import java.util.*;
public class Workflow extends Auditable {
public enum WorkflowStatus {
RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), CANCELLED(true, false), PAUSED(false, true), RESET(true, false);
private boolean terminal;
private boolean successful;
WorkflowStatus(boolean terminal, boolean successful){
this.terminal = terminal;
this.successful = successful;
}
public boolean isTerminal(){
return terminal;
}
public boolean isSuccessful(){
return successful;
}
}
private long revision = 1;
private WorkflowStatus status = WorkflowStatus.RUNNING;
private long endTime;
private String workflowId;
private String parentWorkflowId;
private String parentWorkflowTaskId;
private List<Task> tasks = new LinkedList<>();
private Map<String, Object> input = new HashMap<>();
    private Map<String, Object> output = new HashMap<>();
    private Map<String, Object> attributes = new HashMap<>();
private String workflowType;
private int version;
private String correlationId;
private String reRunFromWorkflowId;
private String reasonForIncompletion;
private int schemaVersion;
private String event;
private Map<String, String> taskToDomain = new HashMap<>();
private List<String> workflowIds = new ArrayList<>();
private Map<String, Object> authorization = new HashMap<>();
private String contextToken;
private String contextUser;
private String clientId;
private Set<String> tags = new HashSet<>();
private int restartCount;
private int rerunCount;
private String cancelledBy;
private String traceId;
private boolean resetTags;
private int jobPriority;
public Workflow(){
}
/**
* @return the status
*/
public WorkflowStatus getStatus() {
return status;
}
/**
* @param status the status to set
*/
public void setStatus(WorkflowStatus status) {
this.status = status;
}
/**
* @return the startTime
*/
public long getStartTime() {
return getCreateTime();
}
/**
* @param startTime the startTime to set
*/
public void setStartTime(long startTime) {
this.setCreateTime(startTime);
}
/**
* @return the endTime
*/
public long getEndTime() {
return endTime;
}
/**
* @param endTime the endTime to set
*/
public void setEndTime(long endTime) {
this.endTime = endTime;
}
/**
* @return the duration of the workflow
*/
public long getDuration() {
return getEndTime() - getStartTime();
}
/**
* @return the workflowId
*/
public String getWorkflowId() {
return workflowId;
}
/**
* @param workflowId the workflowId to set
*/
public void setWorkflowId(String workflowId) {
this.workflowId = workflowId;
}
/**
* @return the tasks which are scheduled, in progress or completed.
*/
public List<Task> getTasks() {
return tasks;
}
/**
* @param tasks the tasks to set
*/
public void setTasks(List<Task> tasks) {
this.tasks = tasks;
}
/**
* @return the input
*/
public Map<String, Object> getInput() {
return input;
}
/**
* @param input the input to set
*/
public void setInput(Map<String, Object> input) {
this.input = input;
}
/**
* @return the task to domain map
*/
public Map<String, String> getTaskToDomain() {
return taskToDomain;
}
/**
* @param taskToDomain the task to domain map
*/
public void setTaskToDomain(Map<String, String> taskToDomain) {
this.taskToDomain = taskToDomain;
}
/**
* @return the output
*/
public Map<String, Object> getOutput() {
return output;
}
/**
* @param output the output to set
*/
public void setOutput(Map<String, Object> output) {
this.output = output;
}
/**
*
* @return The correlation id used when starting the workflow
*/
public String getCorrelationId() {
return correlationId;
}
/**
*
* @param correlationId the correlation id
*/
public void setCorrelationId(String correlationId) {
this.correlationId = correlationId;
}
/**
*
* @return Workflow Type / Definition
*/
public String getWorkflowType() {
return workflowType;
}
/**
*
* @param workflowType Workflow type
*/
public void setWorkflowType(String workflowType) {
this.workflowType = workflowType;
}
/**
* @return the version
*/
public int getVersion() {
return version;
}
/**
* @param version the version to set
*/
public void setVersion(int version) {
this.version = version;
}
public String getReRunFromWorkflowId() {
return reRunFromWorkflowId;
}
public void setReRunFromWorkflowId(String reRunFromWorkflowId) {
this.reRunFromWorkflowId = reRunFromWorkflowId;
}
public String getReasonForIncompletion() {
return reasonForIncompletion;
}
public void setReasonForIncompletion(String reasonForIncompletion) {
this.reasonForIncompletion = reasonForIncompletion;
}
/**
* @return the parentWorkflowId
*/
public String getParentWorkflowId() {
return parentWorkflowId;
}
/**
* @param parentWorkflowId the parentWorkflowId to set
*/
public void setParentWorkflowId(String parentWorkflowId) {
this.parentWorkflowId = parentWorkflowId;
}
/**
* @return whether this workflow is a sub-workflow.
*/
public boolean isSubWorkflow() {
final String parentId = getParentWorkflowId();
        return parentId != null && !parentId.isEmpty();
}
/**
* @return the parentWorkflowTaskId
*/
public String getParentWorkflowTaskId() {
return parentWorkflowTaskId;
}
/**
* @param parentWorkflowTaskId the parentWorkflowTaskId to set
*/
public void setParentWorkflowTaskId(String parentWorkflowTaskId) {
this.parentWorkflowTaskId = parentWorkflowTaskId;
}
/**
* @return the schemaVersion Version of the schema for the workflow definition
*/
public int getSchemaVersion() {
return schemaVersion;
}
/**
* @param schemaVersion the schemaVersion to set
*/
public void setSchemaVersion(int schemaVersion) {
this.schemaVersion = schemaVersion;
}
/**
*
* @return Name of the event that started the workflow
*/
public String getEvent() {
return event;
}
/**
*
* @param event Name of the event that started the workflow
*/
public void setEvent(String event) {
this.event = event;
}
public List<String> getWorkflowIds() {
return workflowIds;
}
public void setWorkflowIds(List<String> workflowIds) {
this.workflowIds = workflowIds;
}
public Map<String, Object> getAuthorization() {
return authorization;
}
public void setAuthorization(Map<String, Object> authorization) {
this.authorization = authorization;
}
public String getContextToken() {
return contextToken;
}
public void setContextToken(String contextToken) {
this.contextToken = contextToken;
}
public String getContextUser() {
return contextUser;
}
public void setContextUser(String contextUser) {
this.contextUser = contextUser;
}
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public Set<String> getTags() {
return tags;
}
public void setTags(Set<String> tags) {
this.tags = tags;
}
public int getRestartCount() {
return restartCount;
}
public void setRestartCount(int restartCount) {
this.restartCount = restartCount;
}
public void incRestartCount() {
this.restartCount++;
}
public String getCancelledBy() {
return cancelledBy;
}
public void setCancelledBy(String cancelledBy) {
this.cancelledBy = cancelledBy;
}
public int getRerunCount() {
return rerunCount;
}
public void setRerunCount(int rerunCount) {
this.rerunCount = rerunCount;
}
public void incRerunCount() {
this.rerunCount++;
}
public String getTraceId() {
return traceId;
}
public void setTraceId(String traceId) {
this.traceId = traceId;
}
public int getJobPriority() {
return jobPriority;
}
public void setJobPriority(int jobPriority) {
this.jobPriority = jobPriority;
}
public Map<String, Object> getAttributes() {
return attributes;
}
public void setAttributes(Map<String, Object> attributes) {
this.attributes = attributes;
}
public boolean getResetTags() {
return resetTags;
}
public void setResetTags(boolean resetTags) {
this.resetTags = resetTags;
}
@Override
public String toString() {
return workflowType + "." + version + "/" + workflowId + "." + status;
}
public Task getTaskByRefName(String refName) {
if (refName == null) {
throw new RuntimeException("refName passed is null. Check the workflow execution. For dynamic tasks, make sure referenceTaskName is set to a not null value");
}
LinkedList<Task> found = new LinkedList<Task>();
for (Task t : tasks) {
if (t.getReferenceTaskName() == null) {
throw new RuntimeException("Task " + t.getTaskDefName() + ", seq=" + t.getSeq() + " does not have reference name specified.");
}
if (t.getReferenceTaskName().equals(refName)) {
found.add(t);
}
}
if (found.isEmpty()) {
return null;
}
return found.getLast();
}
}
|
ONECOND-1798-Conductor - prioritize its work (job priority)
|
common/src/main/java/com/netflix/conductor/common/run/Workflow.java
|
ONECOND-1798-Conductor - prioritize its work (job priority)
|
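The visible change for this ticket is the default value: jobPriority is now initialized to 5 instead of being left at the int default of 0. A minimal check of that behaviour, written for illustration only and not part of the repository:

    import com.netflix.conductor.common.run.Workflow;

    // Hypothetical sanity check for the new jobPriority default.
    public class JobPriorityDefaultSketch {
        public static void main(String[] args) {
            Workflow wf = new Workflow();
            assert wf.getJobPriority() == 5; // was 0 before this change
            wf.setJobPriority(1);            // callers can still set an explicit priority
        }
    }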
|
Java
|
apache-2.0
|
ba7ea9fcce856c60ca18728e41282cb457221db8
| 0
|
GideonLeGrange/panstamp-tools
|
package me.legrange.panstamp.gui;
import me.legrange.panstamp.gui.task.Task;
import me.legrange.panstamp.gui.task.TaskRunner;
/**
*
* @author gideon
*/
public class WaitDialog extends javax.swing.JDialog implements TaskRunner {
/**
* Creates new form WaitDialog
*/
public WaitDialog(java.awt.Frame parent, Task task) {
super(parent, true);
initComponents();
setLocationRelativeTo(null);
this.task = task;
}
public Object start() throws Throwable {
task.start(this);
setVisible(true);
if (error != null) throw error;
return result;
}
@Override
public void completed(Object result) {
this.result = result;
dispose();
}
@Override
public void error(Throwable e) {
this.error = e;
dispose();
}
@Override
public void update(int progress, String stage) {
// progressBar.setValue(progress);
progressLabel.setText(stage);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
networkPanel = new javax.swing.JPanel();
progressLabel = new javax.swing.JLabel();
jLabel1 = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DO_NOTHING_ON_CLOSE);
setUndecorated(true);
setResizable(false);
progressLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
progressLabel.setText(".");
jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/ajax-loader.gif"))); // NOI18N
javax.swing.GroupLayout networkPanelLayout = new javax.swing.GroupLayout(networkPanel);
networkPanel.setLayout(networkPanelLayout);
networkPanelLayout.setHorizontalGroup(
networkPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(networkPanelLayout.createSequentialGroup()
.addGroup(networkPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, 361, Short.MAX_VALUE)
.addComponent(progressLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
networkPanelLayout.setVerticalGroup(
networkPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, networkPanelLayout.createSequentialGroup()
.addContainerGap(15, Short.MAX_VALUE)
.addComponent(progressLabel)
.addGap(18, 18, 18)
.addComponent(jLabel1)
.addGap(14, 14, 14))
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(networkPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(networkPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
);
pack();
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel jLabel1;
private javax.swing.JPanel networkPanel;
private javax.swing.JLabel progressLabel;
// End of variables declaration//GEN-END:variables
private final Task task;
private Object result;
private Throwable error;
}
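A minimal sketch of how the WaitDialog above is meant to be driven. The me.legrange.panstamp.gui.task.Task interface is not part of this record, so the single start(TaskRunner) method below is an assumption inferred from the task.start(this) call, and LoadNetworkTask is a hypothetical implementation. The slow work runs off the event dispatch thread so the modal setVisible(true) in WaitDialog.start() can block until one of the TaskRunner callbacks disposes the dialog.

package me.legrange.panstamp.gui;

import javax.swing.SwingUtilities;

import me.legrange.panstamp.gui.task.Task;
import me.legrange.panstamp.gui.task.TaskRunner;

/** Hypothetical background task reporting progress and completion through the TaskRunner callbacks. */
public class LoadNetworkTask implements Task {

    // start(TaskRunner) is assumed from the task.start(this) call in WaitDialog; the real interface may differ.
    @Override
    public void start(final TaskRunner runner) {
        new Thread(() -> {
            try {
                SwingUtilities.invokeLater(() -> runner.update(0, "Connecting to network..."));
                Object handle = doTheSlowWork(); // placeholder for the real work
                // The callbacks touch Swing state in WaitDialog, so hand them back to the EDT.
                SwingUtilities.invokeLater(() -> runner.completed(handle));
            } catch (Exception ex) {
                SwingUtilities.invokeLater(() -> runner.error(ex));
            }
        }, "load-network").start();
    }

    private Object doTheSlowWork() {
        return "network-handle"; // stand-in result
    }
}

Calling code would then block on the dialog, for example Object handle = new WaitDialog(parentFrame, new LoadNetworkTask()).start(); the value passed to completed(...) becomes the return value, and anything passed to error(...) is rethrown by start().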
|
src/main/java/me/legrange/panstamp/gui/WaitDialog.java
|
package me.legrange.panstamp.gui;
import me.legrange.panstamp.gui.task.Task;
import me.legrange.panstamp.gui.task.TaskRunner;
/**
*
* @author gideon
*/
public class WaitDialog extends javax.swing.JDialog implements TaskRunner {
/**
* Creates new form WaitDialog
*/
public WaitDialog(java.awt.Frame parent, Task task) {
super(parent, true);
initComponents();
setLocationRelativeTo(null);
this.task = task;
}
public Object start() throws Throwable {
System.out.println("WD::start() => task");
task.start(this);
System.out.println("WD::start() => visible");
setVisible(true);
System.out.println("WD::start() => return");
if (error != null) throw error;
return result;
}
@Override
public void completed(Object result) {
this.result = result;
dispose();
}
@Override
public void error(Throwable e) {
this.error = e;
dispose();
}
@Override
public void update(int progress, String stage) {
// progressBar.setValue(progress);
progressLabel.setText(stage);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
networkPanel = new javax.swing.JPanel();
progressLabel = new javax.swing.JLabel();
jLabel1 = new javax.swing.JLabel();
setDefaultCloseOperation(javax.swing.WindowConstants.DO_NOTHING_ON_CLOSE);
setUndecorated(true);
setResizable(false);
progressLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
progressLabel.setText(".");
jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
jLabel1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/images/ajax-loader.gif"))); // NOI18N
javax.swing.GroupLayout networkPanelLayout = new javax.swing.GroupLayout(networkPanel);
networkPanel.setLayout(networkPanelLayout);
networkPanelLayout.setHorizontalGroup(
networkPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(networkPanelLayout.createSequentialGroup()
.addGroup(networkPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, 361, Short.MAX_VALUE)
.addComponent(progressLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
networkPanelLayout.setVerticalGroup(
networkPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, networkPanelLayout.createSequentialGroup()
.addContainerGap(15, Short.MAX_VALUE)
.addComponent(progressLabel)
.addGap(18, 18, 18)
.addComponent(jLabel1)
.addGap(14, 14, 14))
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(networkPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(networkPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
);
pack();
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel jLabel1;
private javax.swing.JPanel networkPanel;
private javax.swing.JLabel progressLabel;
// End of variables declaration//GEN-END:variables
private final Task task;
private Object result;
private Throwable error;
}
|
Removed stray debug output
|
src/main/java/me/legrange/panstamp/gui/WaitDialog.java
|
Removed stray debug output
|