package jkind.translation;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import jkind.lustre.ArrayAccessExpr;
import jkind.lustre.ArrayExpr;
import jkind.lustre.ArrayUpdateExpr;
import jkind.lustre.BinaryExpr;
import jkind.lustre.BoolExpr;
import jkind.lustre.CastExpr;
import jkind.lustre.CondactExpr;
import jkind.lustre.IdExpr;
import jkind.lustre.IfThenElseExpr;
import jkind.lustre.IntExpr;
import jkind.lustre.NamedType;
import jkind.lustre.NodeCallExpr;
import jkind.lustre.RealExpr;
import jkind.lustre.RecordAccessExpr;
import jkind.lustre.RecordExpr;
import jkind.lustre.RecordUpdateExpr;
import jkind.lustre.TupleExpr;
import jkind.lustre.UnaryExpr;
import jkind.lustre.visitors.ExprVisitor;
import jxl.CellReferenceHelper;
public class Expr2FormulaVisitor implements ExprVisitor<Void> {
private int column;
private final String id;
private final Map<String, Integer> rowAssignments;
private final Map<String, String> intToEnum;
private final Map<String, String> enumToInt;
private final Set<String> refs;
private final StringBuilder buf;
private static final int INITIAL_COLUMN = 1;
public Expr2FormulaVisitor(String id, int column, Map<String, Integer> rowAssignments,
Map<String, String> intToEnum, Map<String, String> enumToInt) {
this.id = id;
this.column = column;
this.rowAssignments = rowAssignments;
this.intToEnum = intToEnum;
this.enumToInt = enumToInt;
this.refs = new LinkedHashSet<>();
this.buf = new StringBuilder();
}
@Override
public String toString() {
if (refs.isEmpty()) {
return wrap(buf.toString());
}
StringBuilder result = new StringBuilder();
result.append("IF(OR(");
boolean first = true;
for (String ref : refs) {
if (!first) {
result.append(",");
}
result.append("IF(ISERROR(" + ref + "),FALSE," + ref + "=\"\")");
first = false;
}
result.append("), \"\", ");
result.append(wrap(buf.toString()));
result.append(")");
return result.toString();
}
private String wrap(String formula) {
if (intToEnum.containsKey(id)) {
return "HLOOKUP(" + buf + "," + intToEnum.get(id) + ",2,FALSE)";
} else {
return formula;
}
}
@Override
public Void visit(ArrayAccessExpr e) {
throw new IllegalArgumentException("Arrays must be flattened before translation to formula");
}
@Override
public Void visit(ArrayExpr e) {
throw new IllegalArgumentException("Arrays must be flattened before translation to formula");
}
@Override
public Void visit(ArrayUpdateExpr e) {
throw new IllegalArgumentException("Arrays must be flattened before translation to formula");
}
@Override
public Void visit(BinaryExpr e) {
switch (e.op) {
case ARROW:
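			// A Lustre arrow yields its left (initial-state) operand in the
			// first column and its right operand in every later column.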
if (column > INITIAL_COLUMN) {
e.right.accept(this);
} else {
e.left.accept(this);
}
return null;
case OR:
case AND:
buf.append(e.op.toString().toUpperCase());
buf.append("(");
e.left.accept(this);
buf.append(",");
e.right.accept(this);
buf.append(")");
return null;
case XOR:
buf.append("(");
e.left.accept(this);
buf.append("<>");
e.right.accept(this);
buf.append(")");
return null;
case INT_DIVIDE:
buf.append("INT(");
e.left.accept(this);
buf.append("/");
e.right.accept(this);
buf.append(")");
return null;
case MODULUS:
buf.append("MOD(");
e.left.accept(this);
buf.append(",");
e.right.accept(this);
buf.append(")");
return null;
case IMPLIES:
buf.append("OR(NOT(");
e.left.accept(this);
buf.append("),");
e.right.accept(this);
buf.append(")");
return null;
default:
buf.append("(");
e.left.accept(this);
buf.append(e.op);
e.right.accept(this);
buf.append(")");
return null;
}
}
@Override
public Void visit(BoolExpr e) {
buf.append(e.value ? "TRUE" : "FALSE");
return null;
}
@Override
public Void visit(CastExpr e) {
if (e.type == NamedType.REAL) {
e.expr.accept(this);
} else if (e.type == NamedType.INT) {
buf.append("FLOOR(");
e.expr.accept(this);
buf.append(",1)");
} else {
throw new IllegalArgumentException();
}
return null;
}
@Override
public Void visit(CondactExpr e) {
throw new IllegalArgumentException("Condacts must be removed before translation to formula");
}
@Override
public Void visit(IdExpr e) {
int row = rowAssignments.get(e.id);
String cell = CellReferenceHelper.getCellReference(column, row);
if (enumToInt.containsKey(e.id)) {
buf.append("HLOOKUP(" + cell + "," + enumToInt.get(e.id) + ",2,FALSE)");
} else {
buf.append(cell);
}
refs.add(cell);
return null;
}
@Override
public Void visit(IfThenElseExpr e) {
buf.append("IF(");
e.cond.accept(this);
buf.append(",");
e.thenExpr.accept(this);
buf.append(",");
e.elseExpr.accept(this);
buf.append(")");
return null;
}
@Override
public Void visit(IntExpr e) {
buf.append(e.value);
return null;
}
@Override
public Void visit(NodeCallExpr e) {
throw new IllegalArgumentException(
"Node calls must be inlined before translation to formula");
}
@Override
public Void visit(RealExpr e) {
buf.append(e.value.toPlainString());
return null;
}
@Override
public Void visit(RecordAccessExpr e) {
throw new IllegalArgumentException(
"Records must be flattened before translation to formula");
}
@Override
public Void visit(RecordExpr e) {
throw new IllegalArgumentException(
"Records must be flattened before translation to formula");
}
@Override
public Void visit(RecordUpdateExpr e) {
throw new IllegalArgumentException(
"Records must be flattened before translation to formula");
}
@Override
public Void visit(TupleExpr e) {
throw new IllegalArgumentException("Tuples must be flattened before translation to formula");
}
@Override
public Void visit(UnaryExpr e) {
switch (e.op) {
case PRE:
if (column == INITIAL_COLUMN) {
// Create an error value for pre in initial step
buf.append("(0+\"\")");
return null;
}
buf.append("(");
column--;
e.expr.accept(this);
column++;
buf.append(")");
return null;
case NEGATIVE:
buf.append("(-");
e.expr.accept(this);
buf.append(")");
return null;
case NOT:
buf.append("NOT(");
e.expr.accept(this);
buf.append(")");
return null;
default:
throw new IllegalArgumentException("Unknown unary operator");
}
}
}
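
/*
 * Hedged usage sketch, not part of the original class: how the visitor might
 * be driven for one spreadsheet cell. The helper name and the idea that the
 * expression and the maps come from the surrounding translation code are
 * assumptions; only the constructor, accept() and toString() calls are taken
 * from the class above.
 */
class Expr2FormulaVisitorExample {
	static String formulaFor(jkind.lustre.Expr expr, String id, int column,
			Map<String, Integer> rowAssignments, Map<String, String> intToEnum,
			Map<String, String> enumToInt) {
		Expr2FormulaVisitor visitor =
				new Expr2FormulaVisitor(id, column, rowAssignments, intToEnum, enumToInt);
		// Visiting the expression fills the internal buffer with the formula text.
		expr.accept(visitor);
		// toString() wraps every referenced cell in an IF(ISERROR(...)) guard.
		return visitor.toString();
	}
}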
|
/*
* NOTE: Replace "YOUR_USERNAME" and "YOUR_ACCESS_KEY" with your SauceLabs account!
*/
package tests.others;
import java.net.MalformedURLException;
import java.net.URL;
import org.junit.After;
import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import com.xceptance.xlt.api.engine.scripting.AbstractScriptTestCase;
public class TestRunnerRemoteInternetExplorer extends AbstractScriptTestCase
{
private WebDriver driver;
protected TestRunnerRemoteInternetExplorer()
{
final DesiredCapabilities caps = DesiredCapabilities.internetExplorer();
caps.setCapability("version", "11.0");
caps.setCapability("platform", Platform.WIN8_1);
try
{
driver = new RemoteWebDriver(
new URL("http://YOUR_USERNAME:YOUR_ACCESS_KEY@ondemand.saucelabs.com:80/wd/hub"), caps);
}
catch (final MalformedURLException e)
{
e.printStackTrace();
}
setWebDriver(driver);
}
@Override
@After
public void tearDown()
{
driver.quit();
}
}
|
package de.dhbw.humbuch.view;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeMap;
import com.google.inject.Inject;
import com.vaadin.event.FieldEvents.TextChangeEvent;
import com.vaadin.event.FieldEvents.TextChangeListener;
import com.vaadin.navigator.View;
import com.vaadin.navigator.ViewChangeListener.ViewChangeEvent;
import com.vaadin.server.StreamResource;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.TextField;
import com.vaadin.ui.VerticalLayout;
import de.davherrmann.mvvm.BasicState;
import de.davherrmann.mvvm.State;
import de.davherrmann.mvvm.StateChangeListener;
import de.davherrmann.mvvm.ViewModelComposer;
import de.davherrmann.mvvm.annotations.BindState;
import de.dhbw.humbuch.model.entity.BorrowedMaterial;
import de.dhbw.humbuch.model.entity.Grade;
import de.dhbw.humbuch.model.entity.Student;
import de.dhbw.humbuch.util.PDFHandler;
import de.dhbw.humbuch.util.PDFStudentList;
import de.dhbw.humbuch.view.components.ConfirmDialog;
import de.dhbw.humbuch.view.components.PrintingComponent;
import de.dhbw.humbuch.view.components.StudentMaterialSelector;
import de.dhbw.humbuch.view.components.StudentMaterialSelectorObserver;
import de.dhbw.humbuch.viewmodel.ReturnViewModel;
import de.dhbw.humbuch.viewmodel.ReturnViewModel.ReturnListStudent;
import de.dhbw.humbuch.viewmodel.StudentInformationViewModel;
import de.dhbw.humbuch.viewmodel.StudentInformationViewModel.Students;
/**
 * This view displays the return screen. It holds a horizontal header bar
 * containing actions and a StudentMaterialSelector with all information about
 * the books lent to students. It is used to return the books and to create
 * student lists.
 */
public class ReturnView extends VerticalLayout implements View,
ViewInformation, StudentMaterialSelectorObserver {
private static final long serialVersionUID = -525078997965992622L;
private static final String TITLE = "Rückgabe";
private static final String SAVE_SELECTED_RETURNING = "Material zurückgegeben";
private static final String MANUAL_RETURN = "Manuelle Rückgabe";
private static final String MANUAL_RETURN_TITLE = "Manuelle Rückgabe";
private static final String STUDENT_LIST = "Schülerliste drucken";
private static final String STUDENT_LIST_PDF = "SchuelerRueckgabeListe.pdf";
private static final String STUDENT_LIST_WINDOW_TITLE = "Schüler Rückgabe Liste";
private static final String FILTER_STUDENT = "Schüler filtern";
private static final String MSG_CONFIRM_RETURN = "Sind alle Listen für die ausgewählten Lehrmaterialien unterschrieben vorhanden?";
private HorizontalLayout horizontalLayoutHeaderBar;
private HorizontalLayout horizontalLayoutActions;
private StudentMaterialSelector studentMaterialSelector;
private TextField textFieldStudentFilter;
private Button buttonManualReturn;
private Button buttonSaveSelectedData;
private Button buttonStudentList;
private ReturnViewModel returnViewModel;
private ConfirmDialog.Listener confirmListener;
@BindState(ReturnListStudent.class)
private State<Map<Grade, Map<Student, List<BorrowedMaterial>>>> gradeAndStudentsWithMaterials = new BasicState<>(
Map.class);
@BindState(Students.class)
public State<Collection<Student>> students = new BasicState<>(
Collection.class);
/**
 * Constructor, called by dependency injection. It initializes all view
 * components, builds the layout and automatically binds the view models.
 * All parameters get injected.
*
* @param viewModelComposer
* the viewmodel composer
* @param returnViewModel
* the return viewmodel
* @param studentInformationViewModel
* the student information viewmodel
* */
@Inject
public ReturnView(ViewModelComposer viewModelComposer,
ReturnViewModel returnViewModel,
StudentInformationViewModel studentInformationViewModel) {
this.returnViewModel = returnViewModel;
init();
buildLayout();
bindViewModel(viewModelComposer, returnViewModel,
studentInformationViewModel);
}
/*
 * Initializes all member variables and view components, configures the
 * components and finally registers the listeners.
*/
private void init() {
horizontalLayoutHeaderBar = new HorizontalLayout();
horizontalLayoutActions = new HorizontalLayout();
studentMaterialSelector = new StudentMaterialSelector();
buttonSaveSelectedData = new Button(SAVE_SELECTED_RETURNING);
buttonStudentList = new Button(STUDENT_LIST);
buttonManualReturn = new Button(MANUAL_RETURN);
textFieldStudentFilter = new TextField();
buttonSaveSelectedData.addStyleName("default");
buttonSaveSelectedData.setEnabled(false);
buttonStudentList.setEnabled(false);
textFieldStudentFilter.setInputPrompt(FILTER_STUDENT);
textFieldStudentFilter.setWidth("50%");
textFieldStudentFilter.setImmediate(true);
studentMaterialSelector.registerAsObserver(this);
studentMaterialSelector.setSizeFull();
addListeners();
}
/*
* Builds the layout.
*/
private void buildLayout() {
horizontalLayoutHeaderBar.setWidth("100%");
horizontalLayoutHeaderBar.setSpacing(true);
horizontalLayoutActions.setSpacing(true);
setSizeFull();
setSpacing(true);
setMargin(true);
horizontalLayoutActions.addComponent(buttonSaveSelectedData);
horizontalLayoutActions.addComponent(buttonManualReturn);
horizontalLayoutActions.addComponent(buttonStudentList);
horizontalLayoutHeaderBar.addComponent(textFieldStudentFilter);
horizontalLayoutHeaderBar.addComponent(horizontalLayoutActions);
horizontalLayoutHeaderBar.setComponentAlignment(
horizontalLayoutActions, Alignment.MIDDLE_RIGHT);
horizontalLayoutHeaderBar.setComponentAlignment(textFieldStudentFilter,
Alignment.MIDDLE_LEFT);
horizontalLayoutHeaderBar.setExpandRatio(textFieldStudentFilter, 1);
addComponent(horizontalLayoutHeaderBar);
addComponent(studentMaterialSelector);
setExpandRatio(studentMaterialSelector, 1);
}
/*
 * Adds a listener to the confirm dialog and to the state holding the grades
 * and students, then calls a sub-method that adds the button listeners.
*/
private void addListeners() {
confirmListener = new ConfirmDialog.Listener() {
private static final long serialVersionUID = -2819494096932449586L;
@Override
public void onClose(ConfirmDialog dialog) {
if (dialog.isConfirmed()) {
HashSet<BorrowedMaterial> materials = studentMaterialSelector
.getCurrentlySelectedBorrowedMaterials();
returnTeachingMaterials(materials);
}
}
};
gradeAndStudentsWithMaterials
.addStateChangeListener(new StateChangeListener() {
@Override
public void stateChange(Object value) {
if (value == null) {
return;
}
updateReturnList();
}
});
addButtonListeners();
}
/*
 * Adds listeners to the buttons. ClickListeners are added to the save,
 * manual return and student list printing buttons.
*/
private void addButtonListeners() {
buttonSaveSelectedData.addClickListener(new ClickListener() {
private static final long serialVersionUID = -9208324317096088956L;
@Override
public void buttonClick(ClickEvent event) {
ConfirmDialog.show(MSG_CONFIRM_RETURN, confirmListener);
}
});
buttonStudentList.addClickListener(new ClickListener() {
private static final long serialVersionUID = -7743939402341845477L;
@Override
public void buttonClick(ClickEvent event) {
doStudentListPrinting();
}
});
buttonManualReturn.addClickListener(new ClickListener() {
private static final long serialVersionUID = 6196708024508507923L;
@Override
public void buttonClick(ClickEvent event) {
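				// With no student selected, ask the user to pick one; with
				// exactly one selected, open the manual return window for that
				// student. More than one selection cannot occur here because
				// update() disables the button in that case.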
HashSet<Student> selectedStudents = (HashSet<Student>) studentMaterialSelector
.getCurrentlySelectedStudents();
if (selectedStudents.size() == 0) {
SelectStudentPopupWindow sspw = new SelectStudentPopupWindow(
MANUAL_RETURN_TITLE, ReturnView.this, students
.get());
getUI().addWindow(sspw);
} else if (selectedStudents.size() == 1) {
// This loop runs only once
for (Student student : selectedStudents) {
ManualProcessPopupWindow mlpw = new ManualProcessPopupWindow(
ReturnView.this, student);
getUI().addWindow(mlpw);
}
}
}
});
textFieldStudentFilter.addTextChangeListener(new TextChangeListener() {
private static final long serialVersionUID = -8656489769177447342L;
@Override
public void textChange(TextChangeEvent event) {
studentMaterialSelector.setFilterString(event.getText());
}
});
}
/*
 * Triggers the PDF creation of a student list. The PDF is created for the
 * students selected in the StudentMaterialSelector; it contains multiple
 * pages when multiple students or classes are selected.
*/
private void doStudentListPrinting() {
LinkedHashMap<Student, List<BorrowedMaterial>> informationForPdf = getPdfInformationFromStudentMaterialSelector();
if (informationForPdf != null) {
Set<PDFStudentList.Builder> builders = new LinkedHashSet<PDFStudentList.Builder>();
for (Student student : informationForPdf.keySet()) {
PDFStudentList.Builder builder = new PDFStudentList.Builder()
.returnList(informationForPdf.get(student));
builders.add(builder);
}
ByteArrayOutputStream baos = new PDFStudentList(builders)
.createByteArrayOutputStreamForPDF();
if (baos != null) {
String fileNameIncludingHash = "" + new Date().hashCode() + "_"
+ STUDENT_LIST_PDF;
StreamResource sr = new StreamResource(
new PDFHandler.PDFStreamSource(baos),
fileNameIncludingHash);
new PrintingComponent(sr, STUDENT_LIST_WINDOW_TITLE);
}
}
}
/*
 * Collects all information needed for the PDF generation from the
 * StudentMaterialSelector, sorts it by grade and student and puts it into
 * the data structure needed by the PDF builders.
*
* @return all information needed for the pdf generation from the
* StudentMaterialSelector
*/
private LinkedHashMap<Student, List<BorrowedMaterial>> getPdfInformationFromStudentMaterialSelector() {
HashSet<BorrowedMaterial> allSelectedMaterials = studentMaterialSelector
.getCurrentlySelectedBorrowedMaterials();
HashSet<Student> allSelectedStudents = studentMaterialSelector
.getCurrentlySelectedStudents();
LinkedHashMap<Student, List<BorrowedMaterial>> studentsWithMaterials = new LinkedHashMap<Student, List<BorrowedMaterial>>();
// Sort by grade, then by student
TreeMap<Grade, List<Student>> treeToSortForGrades = new TreeMap<Grade, List<Student>>();
for (Student student : allSelectedStudents) {
if (treeToSortForGrades.containsKey(student.getGrade())) {
List<Student> studentsInGrade = treeToSortForGrades.get(student
.getGrade());
if (studentsInGrade.contains(student)) {
continue;
}
studentsInGrade.add(student);
Collections.sort(studentsInGrade);
treeToSortForGrades.put(student.getGrade(), studentsInGrade);
} else {
List<Student> studentList = new ArrayList<Student>();
studentList.add(student);
treeToSortForGrades.put(student.getGrade(), studentList);
}
}
// Extract all the information needed to create the PDF
for (Grade grade : treeToSortForGrades.keySet()) {
List<Student> studentsInGrade = treeToSortForGrades.get(grade);
for (Student student : studentsInGrade) {
for (BorrowedMaterial material : allSelectedMaterials) {
if (student.equals(material.getStudent())) {
if (studentsWithMaterials.containsKey(student)) {
List<BorrowedMaterial> currentlyAddedMaterials = studentsWithMaterials
.get(student);
currentlyAddedMaterials.add(material);
Collections.sort(currentlyAddedMaterials);
studentsWithMaterials.put(student,
currentlyAddedMaterials);
} else {
List<BorrowedMaterial> materialList = new ArrayList<BorrowedMaterial>();
materialList.add(material);
studentsWithMaterials.put(student, materialList);
}
}
}
}
}
return studentsWithMaterials;
}
/**
* All given materials get returned. They can belong to different students
* in different classes.
*
* @param materials
* the materials which shall be returned
* */
public void returnTeachingMaterials(Set<BorrowedMaterial> materials) {
returnViewModel.setBorrowedMaterialsReturned(materials);
}
/*
* This method is called from the state change listener and is responsible
* for updating the StudentMaterialSelector accordingly.
*/
private void updateReturnList() {
// System.out.println("updated.");
// Map<Grade, Map<Student, List<BorrowedMaterial>>> map =
// gradeAndStudentsWithMaterials.get();
// for(Grade g : map.keySet()) {
// Map<Student, List<BorrowedMaterial>> map2 = map.get(g);
// for(Student s : map2.keySet()) {
// List<BorrowedMaterial> lbm = map2.get(s);
// for(BorrowedMaterial bm : lbm) {
// System.out.println("g: " + g + " s: " + s.getFirstname() + " " +
// s.getLastname() + " bm: " + bm.getTeachingMaterial().getName() );
studentMaterialSelector
.setGradesAndStudentsWithMaterials(gradeAndStudentsWithMaterials
.get());
}
/*
* Binds the view model.
*/
private void bindViewModel(ViewModelComposer viewModelComposer,
Object... viewModels) {
try {
viewModelComposer.bind(this, viewModels);
} catch (IllegalAccessException | NoSuchElementException
| UnsupportedOperationException e) {
e.printStackTrace();
}
}
/**
 * This method is always called when the view is entered (navigated to). It
* refreshes the viewmodel and thus updates the StudentMaterialSelector and
* other view components.
*
* @param event
* the event is not used.
* */
@Override
public void enter(ViewChangeEvent event) {
// returnViewModel.generateStudentReturnList();
returnViewModel.refresh();
}
/**
* Returns the title of this view.
*
* @return the title of this view
* */
@Override
public String getTitle() {
return TITLE;
}
/*
 * Observer callback from the StudentMaterialSelector. It adapts the button
 * states to the current selection without the need to manually refresh.
*/
@Override
public void update() {
HashSet<Student> students = studentMaterialSelector
.getCurrentlySelectedStudents();
HashSet<BorrowedMaterial> materials = studentMaterialSelector
.getCurrentlySelectedBorrowedMaterials();
// Adapt save button
buttonSaveSelectedData.setEnabled(materials.size() >= 1);
// Adapt manual return button
buttonManualReturn.setEnabled(students.size() <= 1);
// Adapt student list button
buttonStudentList.setEnabled(students.size() >= 1);
}
}
|
package de.prob2.ui.beditor;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ResourceBundle;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import de.prob2.ui.internal.StageManager;
import de.prob2.ui.internal.StopActions;
import javafx.fxml.FXML;
import javafx.scene.layout.BorderPane;
import javafx.stage.FileChooser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class BEditorView extends BorderPane {
private static final Logger LOGGER = LoggerFactory.getLogger(BEditorView.class);
private static final Charset EDITOR_CHARSET = Charset.forName("UTF-8");
@FXML
private BEditor beditor;
private Path path;
private ResourceBundle bundle;
@Inject
private BEditorView(StageManager stageManager, ResourceBundle bundle, StopActions stopActions) {
stageManager.loadFXML(this, "beditorView.fxml");
this.bundle = bundle;
setHint();
stopActions.add(beditor::stopHighlighting);
}
public void setHint(){
this.path = null;
beditor.clear();
beditor.appendText(bundle.getString("beditor.hint"));
beditor.getStyleClass().add("editor");
beditor.startHighlighting();
beditor.setEditable(false);
}
public void setEditorText(String text, Path path) {
this.path = path;
beditor.clear();
beditor.appendText(text);
beditor.getStyleClass().add("editor");
beditor.startHighlighting();
beditor.setEditable(true);
}
@FXML
private void handleSave() {
// Maybe add something for the user that reloads the machine automatically?
if(path != null) {
try {
Files.write(path, beditor.getText().getBytes(EDITOR_CHARSET), StandardOpenOption.TRUNCATE_EXISTING);
} catch (IOException e) {
LOGGER.error(bundle.getString("beditor.couldNotSaveFile"), e);
}
}
}
@FXML
private void handleSaveAs() {
if(path != null) {
FileChooser fileChooser = new FileChooser();
fileChooser.setTitle(bundle.getString("preferences.stage.tabs.general.selectLocation"));
fileChooser.getExtensionFilters().add(new FileChooser.ExtensionFilter("Classical B Files", "*.mch", "*.ref", "*.imp"));
File openFile = fileChooser.showSaveDialog(getScene().getWindow());
if (openFile != null) {
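				// If the chosen file name has no extension, default to ".mch".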
File newFile = new File(openFile.getAbsolutePath() + (openFile.getName().contains(".") ? "" : ".mch"));
StandardOpenOption option = StandardOpenOption.CREATE;
if (newFile.exists()) {
option = StandardOpenOption.TRUNCATE_EXISTING;
}
try {
Files.write(newFile.toPath(), beditor.getText().getBytes(EDITOR_CHARSET), option);
path = newFile.toPath();
} catch (IOException e) {
LOGGER.error(bundle.getString("beditor.couldNotSaveFile"), e);
}
}
}
}
}
|
// $Id: Invoker.java,v 1.1 2004/06/29 03:14:22 mdb Exp $
package com.samskivert.util;
import java.util.HashMap;
import java.util.Iterator;
import com.samskivert.Log;
/**
* The invoker is used to invoke self-contained units of code on an
* invoking thread. Each invoker is associated with its own thread and
* that thread is used to invoke all of the units posted to that invoker
* in the order in which they were posted. The invoker also provides a
* convenient mechanism for processing the result of an invocation back on
* the main thread.
*
* <p> The invoker is a useful tool for services that need to block and
* therefore cannot be run on the main thread. For example, an interactive
* application might provide an invoker on which to run database queries.
*
* <p> Bear in mind that each invoker instance runs units on its own
* thread and care must be taken to ensure that code running on separate
* invokers properly synchronizes access to shared information. Where
* possible, complete isolation of the services provided by a particular
* invoker is desirable.
*/
public class Invoker extends LoopingThread
{
/**
* The unit encapsulates a unit of executable code that will be run on
* the invoker thread. It also provides facilities for additional code
* to be run on the main thread once the primary code has completed on
* the invoker thread.
*/
public static abstract class Unit implements Runnable
{
/** The time at which this unit was placed on the queue. */
public long queueStamp;
/** The default constructor. */
public Unit () {}
/** Creates an invoker unit which will report the supplied name in
* {@link #toString}. */
public Unit (String name)
{
_name = name;
}
/**
* This method is called on the invoker thread and should be used
* to perform the primary function of the unit. It can return true
* to cause the <code>handleResult</code> method to be
 * subsequently invoked on the result receiver's thread (generally to allow
* the results of the invocation to be acted upon back in the
* context of the regular world) or false to indicate that no
* further processing should be performed.
*
* @return true if the <code>handleResult</code> method should be
* invoked on the main thread, false if not.
*/
public abstract boolean invoke ();
/**
* Invocation unit implementations can implement this function to
* perform any post-unit-invocation processing back on the main
* thread. It will be invoked if <code>invoke</code> returns true.
*/
public void handleResult ()
{
// do nothing by default
}
// we want to be a runnable to make the receiver interface simple,
// but we'd like for invocation unit implementations to be able to
// put their result handling code into an aptly named method
public void run ()
{
handleResult();
}
/** Returns the name of this unit. */
public String toString ()
{
return _name;
}
protected String _name = "Unknown";
}
/**
* Creates an invoker that will post results to the supplied result
* receiver.
*/
public Invoker (String name, RunQueue resultReceiver)
{
super(name);
_receiver = resultReceiver;
}
/**
* Posts a unit to this invoker for subsequent invocation on the
* invoker's thread.
*/
public void postUnit (Unit unit)
{
// note the time
unit.queueStamp = System.currentTimeMillis();
// and append it to the queue
_queue.append(unit);
}
// documentation inherited
public void iterate ()
{
// pop the next item off of the queue
Unit unit = (Unit) _queue.get();
long start = 0L;
if (PERF_TRACK) {
    start = System.currentTimeMillis();
    _unitsRun++;
    // record the time spent on the queue as a special unit
    recordMetrics("queue_wait_time", start - unit.queueStamp);
}
try {
willInvokeUnit(unit, start);
if (unit.invoke()) {
// if it returned true, we post it to the receiver thread
// to invoke the result processing
_receiver.postRunnable(unit);
}
didInvokeUnit(unit, start);
} catch (Throwable t) {
Log.warning("Invocation unit failed [unit=" + unit + "].");
Log.logStackTrace(t);
}
}
/**
* Shuts down the invoker thread by queueing up a unit that will cause
* the thread to exit after all currently queued units are processed.
*/
public void shutdown ()
{
_queue.append(new Unit() {
public boolean invoke () {
_running = false;
return false;
}
});
}
/**
* Called before we process an invoker unit.
*
* @param unit the unit about to be invoked.
* @param start a timestamp recorded immediately before invocation if
* {@link #PERF_TRACK} is enabled, 0L otherwise.
*/
protected void willInvokeUnit (Unit unit, long start)
{
}
/**
 * Called after we process an invoker unit.
 *
 * @param unit the unit that was just invoked.
* @param start a timestamp recorded immediately before invocation if
* {@link #PERF_TRACK} is enabled, 0L otherwise.
*/
protected void didInvokeUnit (Unit unit, long start)
{
// track some performance metrics
if (PERF_TRACK) {
long duration = System.currentTimeMillis() - start;
Object key = unit.getClass();
recordMetrics(key, duration);
// report long runners
if (duration > 5000L) {
Log.warning("Really long invoker unit [unit=" + unit +
" (" + key + "), time=" + duration + "ms].");
} else if (duration > 500L) {
Log.info("Long invoker unit [unit=" + unit +
" (" + key + "), time=" + duration + "ms].");
}
}
}
protected void recordMetrics (Object key, long duration)
{
UnitProfile prof = (UnitProfile)_tracker.get(key);
if (prof == null) {
_tracker.put(key, prof = new UnitProfile());
}
prof.record(duration);
}
/** Used to track profile information on invoked units. */
protected static class UnitProfile
{
public void record (long duration) {
_totalElapsed += duration;
_histo.addValue((int)duration);
}
public void clear () {
_totalElapsed = 0L;
_histo.clear();
}
public String toString () {
int count = _histo.size();
return _totalElapsed + "ms/" + count + " = " +
(_totalElapsed/count) + "ms avg " +
StringUtil.toString(_histo.getBuckets());
}
// track in buckets of 50ms up to 500ms
protected Histogram _histo = new Histogram(0, 50, 10);
protected long _totalElapsed;
}
/** The invoker's queue of units to be executed. */
protected Queue _queue = new Queue();
/** The result receiver with which we're working. */
protected RunQueue _receiver;
/** Tracks the counts of invocations by unit's class. */
protected HashMap _tracker = new HashMap();
/** The total number of invoker units run since the last report. */
protected int _unitsRun;
/** Whether or not to track invoker unit performance. */
protected static final boolean PERF_TRACK = true;
}
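
/*
 * Hedged usage sketch, not part of the original file: a service holding an
 * Invoker might post a blocking piece of work like this. The "query" string is
 * a placeholder; only postUnit(), invoke() and handleResult() come from the
 * class above.
 */
class InvokerExample
{
    static void runQuery (Invoker invoker)
    {
        invoker.postUnit(new Invoker.Unit("sample query") {
            public boolean invoke () {
                // runs on the invoker thread; it is safe to block here
                _result = "pretend result of a blocking call";
                return true; // ask for handleResult() on the receiver thread
            }

            public void handleResult () {
                // runs back on the result receiver (e.g. the main thread)
                System.out.println("result: " + _result);
            }

            protected String _result;
        });
    }
}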
|
package distance;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import node.Node;
import node.NodeIndexer;
import costmodel.CostModel;
/**
* Implements an exponential algorithm for the tree edit distance. It computes
 * all possible TED mappings between two trees and calculates their minimal
 * cost.
 *
 * @param <C> type of cost model.
 * @param <D> type of node data.
*/
public class AllPossibleMappingsTED<C extends CostModel, D> {
/**
* Indexer of the source tree.
*
* @see node.NodeIndexer
*/
private NodeIndexer it1;
/**
* Indexer of the destination tree.
*
* @see node.NodeIndexer
*/
private NodeIndexer it2;
/**
* The size of the source input tree.
*/
private int size1;
/**
* The size of the destination tree.
*/
private int size2;
/**
* Cost model to be used for calculating costs of edit operations.
*/
private C costModel;
/**
* Constructs the AllPossibleMappingsTED algorithm with a specific cost model.
*
* @param costModel a cost model used in the algorithm.
*/
public AllPossibleMappingsTED(C costModel) {
this.costModel = costModel;
}
/**
* Computes the tree edit distance between two trees by trying all possible
* TED mappings. It uses the specified cost model.
*
* @param t1 source tree.
* @param t2 destination tree.
* @return the tree edit distance between two trees.
*/
public float computeEditDistance(Node<D> t1, Node<D> t2) {
// Index the nodes of both input trees.
init(t1, t2);
ArrayList<ArrayList<int[]>> mappings = generateAllOneToOneMappings();
removeNonTEDMappings(mappings);
return getMinCost(mappings);
}
/**
* Indexes the input trees.
*
* @param t1 source tree.
* @param t2 destination tree.
*/
public void init(Node<D> t1, Node<D> t2) {
it1 = new NodeIndexer(t1, costModel);
it2 = new NodeIndexer(t2, costModel);
size1 = it1.getSize();
size2 = it2.getSize();
}
/**
* Generate all possible 1-1 mappings.
*
* <p>These mappings do not conform to TED conditions (sibling-order and
* ancestor-descendant).
*
* <p>A mapping is a list of pairs (arrays) of preorder IDs (identifying
* nodes).
*
* @return set of all 1-1 mappings.
*/
private ArrayList<ArrayList<int[]>> generateAllOneToOneMappings() {
// Start with an empty mapping - all nodes are deleted or inserted.
ArrayList<ArrayList<int[]>> mappings = new ArrayList<ArrayList<int[]>>(1);
mappings.add(new ArrayList<int[]>(size1 + size2));
// Add all deleted nodes.
for (int n1 = 0; n1 < size1; n1++) {
mappings.get(0).add(new int[]{n1, -1});
}
// Add all inserted nodes.
for (int n2 = 0; n2 < size2; n2++) {
mappings.get(0).add(new int[]{-1, n2});
}
// For each node in the source tree.
for (int n1 = 0; n1 < size1; n1++) {
// Duplicate all mappings and store in mappings_copy.
ArrayList<ArrayList<int[]>> mappings_copy = deepMappingsCopy(mappings);
// For each node in the destination tree.
for (int n2 = 0; n2 < size2; n2++) {
// For each mapping (produced for all n1 values smaller than
// current n1).
for (ArrayList<int[]> m : mappings_copy) {
// Produce new mappings with the pair (n1, n2) by adding this
// pair to all mappings where it is valid to add.
boolean element_add = true;
// Verify if (n1, n2) can be added to mapping m.
// All elements in m are checked with (n1, n2) for possible
// violation.
// One-to-one condition.
for (int[] e : m) {
// n2 must not be mapped already in m
if (e[0] != -1 && e[1] != -1 && e[1] == n2) {
element_add = false;
// System.out.println("Add " + n2 + " false.");
break;
}
}
// New mappings must be produced by duplicating a previous
// mapping and extending it by (n1, n2).
if (element_add) {
ArrayList<int[]> m_copy = deepMappingCopy(m);
m_copy.add(new int[]{n1, n2});
// If a pair (n1,n2) is added, (n1,-1) and (-1,n2) must be removed.
removeMappingElement(m_copy, new int[]{n1, -1});
removeMappingElement(m_copy, new int[]{-1, n2});
mappings.add(m_copy);
}
}
}
}
return mappings;
}
/**
 * Given all 1-1 mappings, discard those that violate TED conditions
* (ancestor-descendant and sibling order).
*
* @param mappings set of all 1-1 mappings.
*/
private void removeNonTEDMappings(ArrayList<ArrayList<int[]>> mappings) {
// Validate each mapping separately.
// Iterator safely removes mappings while iterating.
for (Iterator<ArrayList<int[]>> mit = mappings.iterator(); mit.hasNext();) {
ArrayList<int[]> m = mit.next();
if (!isTEDMapping(m)) {
mit.remove();
}
}
}
/**
* Test if a 1-1 mapping is a TED mapping.
*
* @param m a 1-1 mapping.
* @return {@code true} if {@code m} is a TED mapping, and {@code false}
* otherwise.
*/
boolean isTEDMapping(ArrayList<int[]> m) {
// Validate each pair of pairs of mapped nodes in the mapping.
for (int[] e1 : m) {
// Use only pairs of mapped nodes for validation.
if (e1[0] == -1 || e1[1] == -1) {
continue;
}
for (int[] e2 : m) {
// Use only pairs of mapped nodes for validation.
if (e2[0] == -1 || e2[1] == -1) {
continue;
}
// If any of the conditions below doesn't hold, discard m.
// Validate ancestor-descendant condition.
boolean a = e1[0] < e2[0] && it1.preL_to_preR[e1[0]] < it1.preL_to_preR[e2[0]];
boolean b = e1[1] < e2[1] && it2.preL_to_preR[e1[1]] < it2.preL_to_preR[e2[1]];
if ((a && !b) || (!a && b)) {
// Discard the mapping.
// If this condition doesn't hold, the next condition
// doesn't have to be verified any more and any other
// pair (e1, e2) doesn't have to be verified any more.
return false;
}
// Validate sibling-order condition.
a = e1[0] < e2[0] && it1.preL_to_preR[e1[0]] > it1.preL_to_preR[e2[0]];
b = e1[1] < e2[1] && it2.preL_to_preR[e1[1]] > it2.preL_to_preR[e2[1]];
if ((a && !b) || (!a && b)) {
// Discard the mapping.
return false;
}
}
}
return true;
}
/**
* Given list of all TED mappings, calculate the cost of the minimal-cost
* mapping.
*
* @param tedMappings set of all TED mappings.
* @return the minimal cost among all TED mappings.
*/
float getMinCost(ArrayList<ArrayList<int[]>> tedMappings) {
// Initialize min_cost to the upper bound.
float min_cost = size1 + size2;
// System.out.println("min_cost = " + min_cost);
// Verify cost of each mapping.
for (ArrayList<int[]> m : tedMappings) {
float m_cost = 0;
// Sum up edit costs for all elements in the mapping m.
for (int[] e : m) {
// Add edit operation cost.
if (e[0] > -1 && e[1] > -1) {
m_cost += costModel.ren(it1.preL_to_node[e[0]], it2.preL_to_node[e[1]]); // USE COST MODEL - rename e[0] to e[1].
} else if (e[0] > -1) {
m_cost += costModel.del(it1.preL_to_node[e[0]]); // USE COST MODEL - delete e[0].
} else {
m_cost += costModel.ins(it2.preL_to_node[e[1]]); // USE COST MODEL - insert e[1].
}
// Break as soon as the current min_cost is exceeded.
// Only for early loop break.
if (m_cost >= min_cost) {
break;
}
}
// Store the minimal cost - compare m_cost and min_cost
if (m_cost < min_cost) {
min_cost = m_cost;
}
// System.out.printf("min_cost = %.8f\n", min_cost);
}
return min_cost;
}
/**
* Makes a deep copy of a mapping.
*
* @param mapping mapping to copy.
* @return a mapping.
*/
private ArrayList<int[]> deepMappingCopy(ArrayList<int[]> mapping) {
ArrayList<int[]> mapping_copy = new ArrayList<int[]>(mapping.size());
for (int[] me : mapping) { // for each mapping element in a mapping
mapping_copy.add(Arrays.copyOf(me, me.length));
}
return mapping_copy;
}
/**
* Makes a deep copy of a set of mappings.
*
* @param mappings set of mappings to copy.
* @return set of mappings.
*/
private ArrayList<ArrayList<int[]>> deepMappingsCopy(ArrayList<ArrayList<int[]>> mappings) {
ArrayList<ArrayList<int[]>> mappings_copy = new ArrayList<ArrayList<int[]>>(mappings.size());
for (ArrayList<int[]> m : mappings) { // for each mapping in mappings
ArrayList<int[]> m_copy = new ArrayList<int[]>(m.size());
for (int[] me : m) { // for each mapping element in a mapping
m_copy.add(Arrays.copyOf(me, me.length));
}
mappings_copy.add(m_copy);
}
return mappings_copy;
}
/**
* Constructs a string representation of a set of mappings.
*
* @param mappings set of mappings to convert.
* @return string representation of a set of mappings.
*/
private String mappingsToString(ArrayList<ArrayList<int[]>> mappings) {
String result = "Mappings:\n";
for (ArrayList<int[]> m : mappings) {
result += "{";
for (int[] me : m) {
result += "[" + me[0] + "," + me[1] + "]";
}
result += "}\n";
}
return result;
}
/**
* Removes an element (edit operation) from a mapping by its value. In our
 * case the element to remove can always be found in the mapping.
*
* @param m an edit mapping.
* @param e element to remove from {@code m}.
* @return {@code true} if {@code e} has been removed, and {@code false}
* otherwise.
*/
private boolean removeMappingElement(ArrayList<int[]> m, int[] e) {
for (int[] me : m) {
if (me[0] == e[0] && me[1] == e[1]) {
m.remove(me);
return true;
}
}
return false;
}
}
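
/**
 * Hedged usage sketch, not part of the original class: any cost model and two
 * trees can be passed straight in; the algorithm indexes them itself. The
 * helper name is illustrative only.
 */
class AllPossibleMappingsTEDExample {
  static <C extends CostModel, D> float distance(C costModel, Node<D> t1, Node<D> t2) {
    AllPossibleMappingsTED<C, D> apted = new AllPossibleMappingsTED<>(costModel);
    // Enumerates every 1-1 mapping, drops non-TED mappings, returns the cheapest cost.
    return apted.computeEditDistance(t1, t2);
  }
}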
|
package dk.itu.kelvin.util;
// General utilities
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Properties;
// Utilities
import dk.itu.kelvin.util.WeightedGraph.Edge;
import dk.itu.kelvin.util.WeightedGraph.Node;
// Math
import dk.itu.kelvin.math.Epsilon;
/**
 * Computes the shortest path between two nodes of a weighted graph using a
 * Dijkstra-style search with a heuristic edge-weight adjustment.
*
* @param <N> The type of nodes to find paths for.
* @param <E> The type of edges to find paths for.
*/
public final class ShortestPath<N extends Node, E extends Edge<N>> {
/**
* Maps the distance from the source to all different vertices in the graph.
*/
private final Map<N, Float> distance = new HashMap<>();
/**
 * Maps each vertex to the previous vertex on its shortest path.
*/
private final Map<N, N> edgeTo = new HashMap<>();
/**
* Priority queue holding all vertices.
*/
private final PriorityQueue<N> queue;
/**
* Configuration map of custom properties.
*/
private final Properties properties;
/**
* The starting node.
*/
private final N from;
/**
* The ending node.
*/
private final N to;
/**
* Initialize a new shortest path instance given a weighted graph and a
* source node.
*
* @param graph The weighted graph to use for constructing the path tree.
* @param from Starting point for the shortest path.
* @param to Ending point for the shortest path.
*/
public ShortestPath(
final WeightedGraph<N, E> graph,
final N from,
final N to
) {
this(graph, from, to, new Properties());
}
/**
* Initialize a new shortest path instance given a weighted graph and a
* source node along with any custom properties.
*
* @param graph The weighted graph to use for constructing the path
* tree.
* @param from Starting point for the shortest path.
* @param to Ending point for the shortest path.
* @param properties A configuration map of custom properties.
*/
public ShortestPath(
final WeightedGraph<N, E> graph,
final N from,
final N to,
final Properties properties
) {
this.from = from;
this.to = to;
this.properties = properties;
for (E edge: graph.edges()) {
for (N node: edge.nodes()) {
this.distance.put(node, Float.POSITIVE_INFINITY);
}
}
this.distance.put(from, 0.0f);
this.queue = new PriorityQueue<>(this.distance.size(), (a, b) -> {
return this.distance.get(a).compareTo(this.distance.get(b));
});
this.queue.add(from);
while (!this.queue.isEmpty()) {
N next = this.queue.poll();
// Bail out as soon as we've dequeued the node we're looking for.
if (next.equals(to)) {
break;
}
Map<N, E> neighbours = graph.neighbours(next);
if (neighbours == null || neighbours.isEmpty()) {
continue;
}
for (Map.Entry<N, E> neighbour: neighbours.entrySet()) {
N node = neighbour.getKey();
E edge = neighbour.getValue();
this.relax(next, node, edge);
}
}
}
/**
 * Relax an edge and update the queue if the tentative distance to the
 * second node improves. The edge weight is adjusted by the difference of
 * the heuristic estimates towards the target node.
*
* @param from The first node.
* @param to The second node.
* @param edge The edge between the nodes.
*/
private void relax(final N from, final N to, final E edge) {
if (from == null || to == null || edge == null) {
return;
}
double weight = edge.weight(from, to, this.properties);
double estimateFrom = edge.weight(from, this.to, this.properties);
double estimateTo = edge.weight(to, this.to, this.properties);
weight -= estimateFrom - estimateTo;
double distFrom = this.distance.get(from);
double distTo = this.distance.get(to);
if (Epsilon.greater(distTo, distFrom + weight)) {
this.distance.put(to, (float) (distFrom + weight));
this.edgeTo.put(to, from);
this.queue.remove(to);
this.queue.add(to);
}
}
/**
* Get the distance of the shortest path.
*
* @return The distance of the shortest path.
*/
public float distance() {
return this.distance.get(this.to);
}
/**
* Check if a path exists.
*
* @return A boolean indicating whether or not a path exists.
*/
public boolean hasPath() {
return this.distance() < Float.POSITIVE_INFINITY;
}
/**
* Get the nodes in the shortest path.
*
* @return A list of nodes in the shortest path.
*/
public List<N> path() {
if (!this.hasPath()) {
return null;
}
List<N> path = new ArrayList<>();
// Add the target node to the path.
path.add(this.to);
for (
N n = this.edgeTo.get(this.to);
n != null;
n = this.edgeTo.get(n)
) {
path.add(n);
}
// The path was built from the target back to the source and is therefore
// backwards. Reverse it to correct this.
Collections.reverse(path);
return path;
}
}
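
/**
 * Hedged usage sketch, not part of the original class: given a weighted graph
 * and two of its nodes, a route lookup reads as follows. The helper name is
 * illustrative only.
 */
final class ShortestPathExample {
  /**
   * Find the route between two nodes, or an empty list if none exists.
   *
   * @param <N> The type of nodes to find paths for.
   * @param <E> The type of edges to find paths for.
   * @param graph The weighted graph to search.
   * @param from Starting point for the shortest path.
   * @param to Ending point for the shortest path.
   * @return The nodes on the shortest path, or an empty list.
   */
  static <N extends Node, E extends Edge<N>> List<N> route(
    final WeightedGraph<N, E> graph,
    final N from,
    final N to
  ) {
    ShortestPath<N, E> shortestPath = new ShortestPath<>(graph, from, to);

    // path() returns null when no path exists; fall back to an empty list.
    return shortestPath.hasPath() ? shortestPath.path() : Collections.<N>emptyList();
  }
}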
|
package editor.gui.editor;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.GridLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.DefaultListSelectionModel;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.ListSelectionModel;
import editor.collection.CardList;
import editor.collection.deck.Deck;
import editor.database.FormatConstraints;
import editor.database.attributes.Legality;
import editor.database.attributes.ManaCost;
import editor.database.attributes.ManaType;
import editor.database.card.Card;
import editor.database.symbol.ColorSymbol;
import editor.database.symbol.ManaSymbol;
import editor.gui.generic.ComponentUtils;
import editor.gui.settings.SettingsDialog;
import editor.util.UnicodeSymbols;
@SuppressWarnings("serial")
public class LegalityPanel extends Box
{
/** Item to show for searching the main deck. */
private static final String MAIN_DECK = "Main Deck";
/** Item to show for searching all of the lists. */
private static final String ALL_LISTS = "All Lists";
private static final Pattern PARTNER_PATTERN = Pattern.compile("partner(?: with (.+) \\()?");
private List<String> illegal;
private List<String> legal;
private Map<String, List<String>> warnings;
private JList<String> legalList;
private JList<String> illegalList;
private JList<String> warningsList;
public LegalityPanel(EditorFrame editor)
{
super(BoxLayout.Y_AXIS);
setPreferredSize(new Dimension(400, 250));
warnings = FormatConstraints.FORMAT_NAMES.stream().collect(Collectors.toMap(Function.identity(), (l) -> new ArrayList<String>()));
// Panel containing format lists
JPanel listsPanel = new JPanel(new GridLayout(1, 2));
add(listsPanel);
JPanel legalPanel = new JPanel(new BorderLayout());
legalPanel.setBorder(BorderFactory.createTitledBorder("Legal in:"));
listsPanel.add(legalPanel);
legalList = new JList<>();
legalList.setSelectionModel(new DefaultListSelectionModel()
{
@Override
public int getSelectionMode()
{
return ListSelectionModel.SINGLE_SELECTION;
}
@Override
public void setSelectionInterval(int index0, int index1)
{
super.setSelectionInterval(-1, -1);
}
});
legalPanel.add(new JScrollPane(legalList), BorderLayout.CENTER);
JPanel illegalPanel = new JPanel(new BorderLayout());
illegalPanel.setBorder(BorderFactory.createTitledBorder("Illegal in:"));
listsPanel.add(illegalPanel);
illegalList = new JList<>();
illegalList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
illegalPanel.add(new JScrollPane(illegalList), BorderLayout.CENTER);
// Panel containing check box for enabling commander search
Box cmdrPanel = Box.createHorizontalBox();
JCheckBox cmdrCheck = new JCheckBox("", SettingsDialog.settings().editor.legality.searchForCommander);
cmdrCheck.setText(cmdrCheck.isSelected() ? "Search for commander in:" : "Search for commander");
cmdrPanel.add(cmdrCheck);
List<String> names = new ArrayList<>(List.of(MAIN_DECK, ALL_LISTS));
names.addAll(editor.getExtraNames());
var cmdrBox = new JComboBox<>(names.toArray(String[]::new));
cmdrBox.setVisible(SettingsDialog.settings().editor.legality.searchForCommander);
if (SettingsDialog.settings().editor.legality.main)
cmdrBox.setSelectedIndex(names.indexOf(MAIN_DECK));
else if (SettingsDialog.settings().editor.legality.all)
cmdrBox.setSelectedIndex(names.indexOf(ALL_LISTS));
else
{
String name = SettingsDialog.settings().editor.legality.list;
cmdrBox.setSelectedIndex(names.contains(name) ? names.indexOf(name) : names.indexOf(MAIN_DECK));
}
cmdrBox.setMaximumSize(cmdrBox.getPreferredSize());
cmdrPanel.add(cmdrBox);
cmdrPanel.add(Box.createHorizontalGlue());
add(cmdrPanel);
// Panel containing check box for including a sideboard
final JCheckBox sideCheck;
final JComboBox<String> sideCombo;
if (!editor.getExtraNames().isEmpty())
{
String sb = SettingsDialog.settings().editor.legality.sideboard;
add(Box.createVerticalStrut(2));
Box sideboardBox = Box.createHorizontalBox();
sideCheck = new JCheckBox("", !sb.isEmpty() && editor.getExtraNames().contains(sb));
sideboardBox.add(sideCheck);
sideCombo = new JComboBox<>(editor.getExtraNames().toArray(String[]::new));
sideCombo.setSelectedIndex(Math.max(0, editor.getExtraNames().indexOf(sb)));
sideCombo.setMaximumSize(sideCombo.getPreferredSize());
sideboardBox.add(sideCombo);
sideboardBox.add(Box.createHorizontalGlue());
add(sideboardBox);
}
else
{
sideCheck = null;
sideCombo = null;
}
ActionListener listener = (e) -> {
if (!editor.getExtraNames().isEmpty())
{
sideCheck.setText(sideCheck.isSelected() ? "Sideboard is:" : "Include sideboard");
sideCombo.setVisible(sideCheck.isSelected());
}
cmdrCheck.setText("Search for commander" + (cmdrCheck.isSelected() ? " in:" : ""));
cmdrBox.setVisible(cmdrCheck.isSelected());
checkLegality(editor.getList(EditorFrame.MAIN_DECK), !cmdrCheck.isSelected() ? new Deck() : switch (cmdrBox.getSelectedItem().toString()) {
case MAIN_DECK -> editor.getList(EditorFrame.MAIN_DECK);
case ALL_LISTS -> editor.getExtraCards();
default -> editor.getList(cmdrBox.getSelectedItem().toString());
}, !editor.getExtraNames().isEmpty() && sideCheck.isSelected() ? Optional.of(editor.getList(sideCombo.getItemAt(sideCombo.getSelectedIndex()))) : Optional.empty());
};
if (!editor.getExtraNames().isEmpty())
{
sideCheck.addActionListener(listener);
sideCombo.addActionListener(listener);
}
cmdrCheck.addActionListener(listener);
cmdrBox.addActionListener(listener);
JPanel warningsPanel = new JPanel(new BorderLayout());
warningsPanel.setBorder(BorderFactory.createTitledBorder("Warnings"));
add(warningsPanel);
warningsList = new JList<>();
warningsList.setSelectionModel(new DefaultListSelectionModel()
{
@Override
public int getSelectionMode()
{
return ListSelectionModel.SINGLE_SELECTION;
}
@Override
public void setSelectionInterval(int index0, int index1)
{
super.setSelectionInterval(-1, -1);
}
});
warningsList.setCellRenderer((l, v, i, s, c) -> {
Matcher m = ManaCost.MANA_COST_PATTERN.matcher(v);
if (m.find())
{
Box cell = Box.createHorizontalBox();
cell.add(new JLabel(v.substring(0, m.start())));
for (ManaSymbol symbol : ManaCost.parseManaCost(m.group()))
cell.add(new JLabel(symbol.getIcon(ComponentUtils.TEXT_SIZE)));
return cell;
}
else
return new JLabel(v);
});
warningsPanel.add(new JScrollPane(warningsList), BorderLayout.CENTER);
illegalList.addListSelectionListener((e) -> {
if (illegalList.getSelectedIndex() >= 0)
warningsList.setListData(warnings.get(illegalList.getSelectedValue()).stream().map((w) -> UnicodeSymbols.BULLET + " " + w).toArray(String[]::new));
else
warningsList.setListData(new String[0]);
});
listener.actionPerformed(new ActionEvent(cmdrCheck, 0, "", ActionEvent.ACTION_PERFORMED));
}
public void checkLegality(CardList deck, CardList commanderSearch, Optional<CardList> sideboard)
{
for (var warning : warnings.values())
warning.clear();
// Deck size
for (String format : FormatConstraints.FORMAT_NAMES)
{
final FormatConstraints constraints = FormatConstraints.CONSTRAINTS.get(format);
if (constraints.hasCommander)
{
if (((commanderSearch.isEmpty() || commanderSearch == deck) && deck.total() != constraints.deckSize) || ((!commanderSearch.isEmpty() && commanderSearch != deck) && deck.total() != constraints.deckSize - 1))
warnings.get(format).add("Deck does not contain exactly " + (constraints.deckSize - 1) + " cards plus a commander");
}
else
{
if (deck.total() < constraints.deckSize)
warnings.get(format).add("Deck contains fewer than " + constraints.deckSize + " cards");
}
}
Map<Card, Integer> isoNameCounts = new HashMap<>();
for (Card c : deck)
{
boolean counted = false;
for (Card name : isoNameCounts.keySet())
{
if (name.compareName(c) == 0)
{
isoNameCounts.compute(name, (k, v) -> v + deck.getEntry(c).count());
counted = true;
break;
}
}
if (!counted)
isoNameCounts.put(c, deck.getEntry(c).count());
}
for (Card c : deck)
{
for (String format : FormatConstraints.FORMAT_NAMES)
{
final int maxCopies = FormatConstraints.CONSTRAINTS.get(format).maxCopies;
if (!c.legalityIn(format).isLegal)
warnings.get(format).add(c.unifiedName() + " is illegal in " + format);
else if (isoNameCounts.containsKey(c) && !c.ignoreCountRestriction())
{
if (c.legalityIn(format) == Legality.RESTRICTED && isoNameCounts.get(c) > 1)
warnings.get(format).add(c.unifiedName() + " is restricted in " + format);
else if (isoNameCounts.get(c) > maxCopies)
warnings.get(format).add("Deck contains more than " + maxCopies + " copies of " + c.unifiedName());
}
}
}
// Commander exists and matches deck color identity
if (!commanderSearch.isEmpty())
{
Set<ManaType> deckColorIdentity = deck.stream().flatMap((c) -> c.colorIdentity().stream()).collect(Collectors.toSet());
for (final String format : FormatConstraints.FORMAT_NAMES)
{
if (FormatConstraints.CONSTRAINTS.get(format).hasCommander)
{
boolean valid = false;
var possibleCommanders = commanderSearch.stream().filter((c) -> c.commandFormats().contains(format)).collect(Collectors.toList());
for (Card c : new ArrayList<>(possibleCommanders))
{
if (c.colorIdentity().containsAll(deckColorIdentity))
{
valid = true;
break;
}
}
if (!valid)
{
var possiblePartners = possibleCommanders.stream()
.flatMap((c) -> c.normalizedOracle().stream().map((o) -> new SimpleEntry<>(c, PARTNER_PATTERN.matcher(o))))
.filter((e) -> e.getKey().commandFormats().contains(format) && e.getValue().find())
.map((e) -> new SimpleEntry<>(e.getKey(), e.getValue().group(1) != null ? e.getValue().group(1).toLowerCase() : ""))
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
search: for (var p : possiblePartners.entrySet())
{
for (Card c : possibleCommanders)
{
var colorIdentity = new HashSet<ManaType>();
if (p.getValue().isEmpty())
{
if (c.normalizedOracle().stream().map((o) -> PARTNER_PATTERN.matcher(o)).anyMatch((m) -> m.find() && m.group(1) == null))
{
colorIdentity.addAll(p.getKey().colorIdentity());
colorIdentity.addAll(c.colorIdentity());
}
}
else if (p.getValue().equalsIgnoreCase(c.unifiedName()))
{
colorIdentity.addAll(p.getKey().colorIdentity());
colorIdentity.addAll(c.colorIdentity());
}
if (colorIdentity.containsAll(deckColorIdentity))
{
valid = true;
break search;
}
}
}
}
if (!valid)
warnings.get(format).add("Could not find a legendary creature whose color identity contains " +
deckColorIdentity.stream().sorted().map((t) -> ColorSymbol.SYMBOLS.get(t).toString()).collect(Collectors.joining()));
}
}
}
// Sideboard size
sideboard.ifPresent((sb) -> {
for (String format : FormatConstraints.FORMAT_NAMES)
{
int max = FormatConstraints.CONSTRAINTS.get(format).sideboardSize;
if (sb.total() > max)
warnings.get(format).add("Sideboard contains more than " + max + " cards");
}
});
illegal = warnings.keySet().stream().filter((s) -> !warnings.get(s).isEmpty()).collect(Collectors.toList());
Collections.sort(illegal);
legal = new ArrayList<>(FormatConstraints.FORMAT_NAMES);
legal.removeAll(illegal);
Collections.sort(legal);
warningsList.setListData(new String[0]);
legalList.setListData(legal.toArray(String[]::new));
illegalList.setListData(illegal.toArray(String[]::new));
}
}
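
/**
 * Hedged illustration, not part of the original class: how the partner pattern
 * above separates plain "partner" from "partner with <name>" oracle text. The
 * regex is duplicated here because the original field is private, and the
 * sample strings are made up.
 */
class PartnerPatternExample
{
    private static final Pattern PARTNER = Pattern.compile("partner(?: with (.+) \\()?");

    public static void main(String[] args)
    {
        Matcher named = PARTNER.matcher("partner with some other creature (");
        System.out.println(named.find() + " " + named.group(1)); // true, the named partner
        Matcher plain = PARTNER.matcher("partner");
        System.out.println(plain.find() + " " + plain.group(1)); // true, null (generic partner)
    }
}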
|
package edu.berkeley.bid.comm;
import java.util.LinkedList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.Arrays;
import java.nio.*;
//import mpi.*;
public class AllReduce {
public class Machine {
/* Machine Configuration Variables */
int N; // Number of features
int D; // Depth of the network
int M; // Number of Machines
int imachine; // My identity
int [] allks; // k values
Layer [] layers; // All the layers
ByteBuffer [] sendbuf; // buffers, one for each destination in a group
ByteBuffer [] recbuf;
IVec finalMap; // Map from down --> up at layer D-1
LinkedList<Msg> [][] messages; // Message queue for the simulation
boolean doSim = true;
ExecutorService executor;
int trace = 0; // 0: no trace, 1: high-level, 2: everything
public Machine(int N0, int [] allks0, int imachine0, int M0, int bufsize, boolean doSim0, int trace0) {
N = N0;
M = M0;
doSim = doSim0;
if (doSim) {
imachine = imachine0;
} else {
/* try {
String [] args = {""};
MPI.InitThread(args, MPI.THREAD_MULTIPLE);
imachine = MPI.COMM_WORLD.getRank();
} catch (MPIException e) {
throw new RuntimeException("Couldnt init MPI "+e);
} */
}
allks = allks0;
D = allks.length;
trace = trace0;
layers = new Layer[D];
int left = 0;
int right = N;
int cumk = 1;
int maxk = 1;
for (int i = 0; i < D; i++) {
int k = allks[i];
layers[i] = new Layer(k, cumk, left, right, imachine, i);
int pimg = layers[i].posInMyGroup;
left = layers[i].left;
if (pimg > 0) left = layers[i].partBoundaries.data[pimg-1];
right = layers[i].partBoundaries.data[pimg];
cumk *= k;
maxk = Math.max(maxk, k);
}
executor = Executors.newFixedThreadPool(maxk); // set to 1 for sequential messaging.
sendbuf = new ByteBuffer[maxk];
recbuf = new ByteBuffer[maxk];
for (int i = 0; i < maxk; i++) {
sendbuf[i] = ByteBuffer.wrap(new byte[bufsize*4]);
recbuf[i] = ByteBuffer.wrap(new byte[bufsize*4]);
}
if (doSim) {
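// In simulation mode every machine keeps 3*D message queues per peer; tags 3*d, 3*d+1 and
// 3*d+2 carry the config, reduceDown and reduceUp traffic of layer d (see sendrecv below).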
messages = new LinkedList[M][];
for (int i = 0; i < M; i++) {
messages[i] = new LinkedList[3*D];
for (int j = 0; j < 3*D; j++) {
messages[i][j] = new LinkedList<Msg>();
}
}
}
}
public void config(IVec downi, IVec upi) {
IVec [] outputs = new IVec[2];
for (int i = 0; i < D; i++) {
layers[i].config(downi, upi, outputs);
downi = outputs[0];
upi = outputs[1];
}
finalMap = IVec.mapInds(upi, downi);
}
public Vec reduce(Vec downv) {
for (int d = 0; d < D; d++) {
downv = layers[d].reduceDown(downv);
}
Vec upv = downv.mapFrom(finalMap);
for (int d = D-1; d >= 0; d--) {
upv = layers[d].reduceUp(upv);
}
if (trace > 0) {
synchronized (AllReduce.this) {
System.out.format("machine %d reduce result nnz %d out of %d\n", imachine, upv.nnz(), upv.size());
}
}
return upv;
}
class Layer {
/* Layer Configuration Variables */
int k; // Size of this group
int left; // Left boundary of its indices
int right; // Right boundary of its indices
int depth;
int posInMyGroup; // Position in this machines group
int [] outNbr; // Machines we talk to
int [] inNbr; // Machines we listen to
IVec partBoundaries; // Partition boundaries
IVec [] downMaps; // Maps to indices below for down indices
IVec [] upMaps; // Maps to indices below for up indices
int downn; // Size of the down master list
int upn; // Size of the up vector
int [] dPartInds;
int [] uPartInds;
public Layer(int k0, int cumk, int left0, int right0, int imachine, int depth0) {
k = k0;
int i;
left = left0;
right = right0;
depth = depth0;
partBoundaries = new IVec(k);
inNbr = new int [k];
outNbr = new int [k];
dPartInds = new int[k+1];
uPartInds = new int[k+1];
int ckk = cumk * k;
posInMyGroup = (imachine % ckk) / cumk;
int ibase = imachine - posInMyGroup * cumk;
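// Group members are the machines at ids ibase + i*cumk for i in [0, k). outNbr[i] is the member
// at position i; inNbr[i] is the member at position (2*posInMyGroup - i) mod k, i.e. the position
// mirrored around this machine's own position, from which slot i expects its incoming message.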
for (i = 0; i < k; i++) {
partBoundaries.data[i] = left + (int)(((long)(right - left)) * (i+1) / k);
outNbr[i] = ibase + i * cumk;
int toMe = (k + 2*posInMyGroup - i) % k;
inNbr[i] = ibase + toMe * cumk;
}
downMaps = new IVec[k];
upMaps = new IVec[k];
}
class ConfigThread implements Runnable {
IVec [] downp;
IVec [] upp;
IVec [] dtree;
IVec [] utree;
int i;
CountDownLatch latch;
public ConfigThread(IVec [] downp0, IVec [] upp0, IVec [] dtree0, IVec [] utree0, int i0, CountDownLatch latch0) {
downp = downp0;
upp = upp0;
dtree = dtree0;
utree = utree0;
i = i0;
latch = latch0;
}
public void run() {
sendbuf[i].clear();
recbuf[i].clear();
IntBuffer sbuf = sendbuf[i].asIntBuffer();
IntBuffer rbuf = recbuf[i].asIntBuffer();
int seg1 = downp[i].size();
int seg2 = seg1 + upp[i].size();
sbuf.put(seg1);
sbuf.put(seg2);
sbuf.put(downp[i].data, 0, seg1);
sbuf.put(upp[i].data, 0, seg2-seg1);
if (trace > 1) {
synchronized (AllReduce.this) {
System.out.format("config layer %d machine %d sent msg to %d, from %d, sizes %d %d\n", depth, imachine, outNbr[i], inNbr[i], sbuf.get(0), sbuf.get(1));
}
}
sendrecv(sbuf, seg2+2, outNbr[i], rbuf, rbuf.capacity(), inNbr[i], depth*3);
seg1 = rbuf.get();
seg2 = rbuf.get();
if (trace > 1) {
synchronized (AllReduce.this) {
System.out.format("config layer %d machine %d got msg from %d, sizes %d %d\n", depth, imachine, inNbr[i], seg1, seg2);
}
}
IVec downout = new IVec(seg1);
IVec upout = new IVec(seg2-seg1);
rbuf.get(downout.data, 0, seg1);
rbuf.get(upout.data, 0, seg2-seg1);
IVec.checkTree(dtree, downout, i, k);
IVec.checkTree(utree, upout, i, k);
downp[i] = downout;
upp[i] = upout;
latch.countDown();
}
}
public void config(IVec downi, IVec upi, IVec [] outputs) {
IVec [] downp = IVec.partition(downi, partBoundaries);
IVec [] upp = IVec.partition(upi, partBoundaries);
IVec [] dtree = new IVec[2*k-1];
IVec [] utree = new IVec[2*k-1];
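// dtree/utree appear to act as merge trees of 2*k-1 slots that IVec.checkTree fills as the k
// partition exchanges complete; slot 0 ends up holding the merged master index list used below.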
if (trace > 0) {
synchronized (AllReduce.this) {
System.out.format("machine %d layer %d, dparts (%d", imachine, depth, downp[0].size());
for (int i = 1; i < downp.length; i++) System.out.format(", %d", downp[i].size());
System.out.format(") from %d, bounds %d %d\n", downi.size(), partBoundaries.data[0], partBoundaries.data[partBoundaries.size()-1]);
System.out.format("machine %d layer %d, uparts (%d", imachine, depth, upp[0].size());
for (int i = 1; i < upp.length; i++) System.out.format(", %d", upp[i].size());
System.out.format(") from %d, bounds %d %d\n", upi.size(), partBoundaries.data[0], partBoundaries.data[partBoundaries.size()-1]);
}
}
dPartInds[0] = 0;
uPartInds[0] = 0;
for (int i = 0; i < k; i++) {
dPartInds[i+1] = dPartInds[i] + downp[i].size();
uPartInds[i+1] = uPartInds[i] + upp[i].size();
}
CountDownLatch latch = new CountDownLatch(k);
for (int i = 0; i < k; i++) {
int ix = (i + posInMyGroup) % k; // Try to stagger the traffic
executor.execute(new ConfigThread(downp, upp, dtree, utree, ix, latch));
}
try { latch.await(); } catch (InterruptedException e) {}
IVec dmaster = dtree[0];
Arrays.fill(dtree, null);
downn = dmaster.size();
IVec umaster = utree[0];
Arrays.fill(utree, null);
upn = upi.size();
for (int i = 0; i < k; i++) {
downMaps[i] = IVec.mapInds(downp[i], dmaster);
upMaps[i] = IVec.mapInds(upp[i], umaster);
if (trace > 0) {
synchronized (AllReduce.this) {
System.out.format("machine %d dmap(%d) size %d\n", imachine, i, downMaps[i].size());
System.out.format("machine %d umap(%d) size %d\n", imachine, i, upMaps[i].size());
}
}
}
outputs[0] = dmaster;
outputs[1] = umaster;
}
public class ReduceDownThread implements Runnable {
Vec newv;
Vec downv;
int i;
CountDownLatch latch;
public ReduceDownThread(Vec newv0, Vec downv0, int i0, CountDownLatch latch0) {
newv = newv0;
downv = downv0;
i = i0;
latch = latch0;
}
public void run() {
sendbuf[i].clear();
recbuf[i].clear();
IntBuffer isbuf = sendbuf[i].asIntBuffer();
IntBuffer irbuf = recbuf[i].asIntBuffer();
FloatBuffer sbuf = sendbuf[i].asFloatBuffer();
FloatBuffer rbuf = recbuf[i].asFloatBuffer();
int msize = dPartInds[i+1] - dPartInds[i];
isbuf.put(msize);
sbuf.position(1);
sbuf.put(downv.data, dPartInds[i], msize);
if (trace > 1) {
synchronized (AllReduce.this) {
System.out.format("reduce layer %d machine %d sent msg to %d, from %d, size %d\n", depth, imachine, outNbr[i], inNbr[i], msize);
}
}
sendrecv(isbuf, msize+1, outNbr[i], irbuf, rbuf.capacity(), inNbr[i], depth*3+1);
msize = irbuf.get();
if (trace > 1) {
synchronized (AllReduce.this) {
System.out.format("reduce layer %d machine %d got msg from %d, size %d\n", depth, imachine, inNbr[i], msize);
}
}
Vec res = new Vec(msize);
rbuf.position(1);
rbuf.get(res.data, 0, msize);
synchronized (newv) {
res.addTo(newv, downMaps[i]);
}
latch.countDown();
}
}
public Vec reduceDown(Vec downv) {
Vec newv = new Vec(downn);
CountDownLatch latch = new CountDownLatch(k);
for (int i = 0; i < k; i++) {
int ix = (i + posInMyGroup) % k; // Try to stagger the traffic
executor.execute(new ReduceDownThread(newv, downv, ix, latch));
}
try { latch.await(); } catch (InterruptedException e) {}
return newv;
}
public class ReduceUpThread implements Runnable {
Vec newv;
Vec upv;
int i;
CountDownLatch latch;
public ReduceUpThread(Vec newv0, Vec upv0, int i0, CountDownLatch latch0) {
newv = newv0;
upv = upv0;
i = i0;
latch = latch0;
}
public void run () {
sendbuf[i].clear();
recbuf[i].clear();
IntBuffer isbuf = sendbuf[i].asIntBuffer();
IntBuffer irbuf = recbuf[i].asIntBuffer();
FloatBuffer sbuf = sendbuf[i].asFloatBuffer();
FloatBuffer rbuf = recbuf[i].asFloatBuffer();
Vec up = upv.mapFrom(upMaps[i]);
int msize = up.size();
isbuf.put(msize);
sbuf.position(1);
sbuf.put(up.data, 0, msize);
if (trace > 1) {
synchronized (AllReduce.this) {
System.out.format("reduce up layer %d machine %d sent msg to %d, from %d, size %d\n", depth, imachine, outNbr[i], inNbr[i], msize);
}
}
sendrecv(isbuf, msize+1, inNbr[i], irbuf, irbuf.capacity(), outNbr[i], depth*3+2);
msize = irbuf.get();
if (trace > 1) {
synchronized (AllReduce.this) {
System.out.format("reduce up layer %d machine %d got msg from %d, size %d\n", depth, imachine, inNbr[i], msize);
}
}
int psize = uPartInds[i+1] - uPartInds[i];
if (uPartInds[i+1] > newv.size()) throw new RuntimeException("ReduceUp index out of range "+uPartInds[i+1]+" "+newv.size());
if (msize != psize) throw new RuntimeException("ReduceUp size mismatch "+msize+" "+psize);
rbuf.position(1);
rbuf.get(newv.data, uPartInds[i], msize);
latch.countDown();
}
}
public Vec reduceUp(Vec upv) {
Vec newv = new Vec(upn);
CountDownLatch latch = new CountDownLatch(k);
for (int i = 0; i < k; i++) {
int ix = (i + posInMyGroup) % k; // Try to stagger the traffic
executor.execute(new ReduceUpThread(newv, upv, ix, latch));
}
try { latch.await(); } catch (InterruptedException e) {}
return newv;
}
}
public boolean sendrecv(IntBuffer sbuf, int sendn, int outi, IntBuffer rbuf, int recn, int ini, int tag) {
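// Exchange one int message with a peer: if the destination is this machine the data is simply
// copied into rbuf; in simulation mode messages go through the per-tag queues of the simulated
// network; otherwise the (commented-out) MPI path would perform the real send/receive.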
if (imachine == outi) {
Msg a = new Msg(sbuf, sendn, imachine, outi);
rbuf.clear();
rbuf.put(a.buf, 0, sendn);
rbuf.rewind();
return true;
} else {
if (doSim) {
synchronized (simNetwork[outi].messages[imachine][tag]) {
simNetwork[outi].messages[imachine][tag].add(new Msg(sbuf, sendn, imachine, outi));
simNetwork[outi].messages[imachine][tag].notify();
}
synchronized (messages[ini][tag]) {
while (messages[ini][tag].size() == 0) {
try {
messages[ini][tag].wait();
} catch (InterruptedException e) {}
}
Msg msg = messages[ini][tag].removeFirst();
rbuf.clear();
rbuf.put(msg.buf, 0, msg.size);
rbuf.rewind();
}
return true;
} else {
/* try {
sbuf.rewind();
rbuf.clear();
MPI.COMM_WORLD.sendRecv(sbuf, sendn, MPI.INT, outi, tag, rbuf, recn, MPI.INT, ini, tag);
sbuf.rewind();
rbuf.rewind();
} catch (MPIException e) {
throw new RuntimeException("Exception in sendrecv "+e);
} */
/* try {
sbuf.rewind();
rbuf.clear();
Request sreq = MPI.COMM_WORLD.iSend(sbuf, sendn, MPI.INT, outi, tag);
Request rreq = MPI.COMM_WORLD.iRecv(rbuf, recn, MPI.INT, ini, tag);
Status rdone = null;
Status sdone = null;
long timeout = 1000; // Wait this many msecs
long then = System.currentTimeMillis();
while ((sdone == null || rdone == null) && System.currentTimeMillis() - then < timeout) {
if (rdone == null) rdone = rreq.testStatus();
if (sdone == null) sdone = sreq.testStatus();
Thread.sleep(1);
}
if (rdone == null) rreq.cancel();
if (sdone == null) sreq.cancel();
if (rdone == null || sdone == null) {
return false;
}
sbuf.rewind();
rbuf.rewind();
} catch (Exception e) {
throw new RuntimeException("Exception in sendrecv "+e);
} */
}
return true;
}
}
}
public class Msg {
int [] buf;
int size;
int sender;
int receiver;
public Msg(IntBuffer inbuf, int size0, int sender0, int receiver0) {
buf = new int[size0];
inbuf.rewind();
inbuf.get(buf, 0, size0);
inbuf.rewind();
size = size0;
sender = sender0;
receiver = receiver0;
}
}
public Machine [] simNetwork;
public AllReduce(int M) {
simNetwork = new Machine[M];
}
}
|
package es.sandbox.spike.connectn;
import java.util.Objects;
import java.util.Optional;
public class Board {
private final Dimensions dimensions;
private final GameResultCalculator gameResultCalculator;
private final Chip[][] chips;
private Color nextTurn;
/**
* @param dimensions the board dimensions; may not be null
* @param chipsToWin number of aligned chips required to win
* @param startingColor the color that plays the first turn; may not be null
*/
public Board(Dimensions dimensions, int chipsToWin, Color startingColor) {
Objects.requireNonNull(dimensions, "Dimensions may not be null");
GameRules.validateChipsToWin(chipsToWin, dimensions);
Objects.requireNonNull(startingColor, "Starting color may not be null");
this.chips = dimensions.createBoard();
this.dimensions = dimensions;
this.gameResultCalculator = new GameResultCalculator(this, chipsToWin);
this.nextTurn = startingColor;
}
Dimensions dimensions() {
return this.dimensions;
}
/**
* Drops a chip of the given color into the given column.
* @param color the color of the chip to put; must match the current turn
* @param column the column to drop the chip into
* @return the result of the move
* @throws ColumnOutOfRangeException if the column is outside the board
* @throws ColumnIsFullException if the column has no empty position left
* @throws WrongTurnException if it is not the given color's turn
* @throws GameOverException if the game has already finished
*/
public Result put(Color color, int column)
throws ColumnOutOfRangeException, ColumnIsFullException, WrongTurnException, GameOverException {
this.gameResultCalculator.assertThatGameIsOnGoing();
consumeTurnFor(color);
final Position position = findFirstEmptyPositionInColumn(column);
this.chips[position.column()][position.row()] = new Chip(color, position);
return calculateResultFor(position);
}
private void consumeTurnFor(Color color) {
if (this.nextTurn != color) {
throw new WrongTurnException(color);
}
this.nextTurn = color.rotate();
}
private Position findFirstEmptyPositionInColumn(int column) throws ColumnOutOfRangeException, ColumnIsFullException {
return dimensions().positionsAtColumn(column).stream()
.filter(position -> !chipAt(position).isPresent())
.findFirst()
.orElseThrow(() -> new ColumnIsFullException(column));
}
private Result calculateResultFor(Position position) {
return this.gameResultCalculator.calculateFor(position);
}
/**
* @param position the position to inspect; must be non null
* @return the color of the chip at the given position, if any
*/
public Optional<Color> colorAt(Position position) {
Objects.requireNonNull(position, "Position must be non null");
return chipAt(position).map(Chip::color);
}
/**
* @param position the position to inspect; must be non null
* @return the chip at the given position, or empty if the position is vacant or outside the board
*/
public Optional<Chip> chipAt(Position position) {
Objects.requireNonNull(position, "Position must be non null");
if (dimensions().contains(position)) {
return Optional.ofNullable(this.chips[position.column()][position.row()]);
}
return Optional.empty();
}
public Result getResult() {
return this.gameResultCalculator.getResult();
}
}
|
package fi.solita.utils.meta;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.TreeMap;
import javax.lang.model.element.Element;
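/**
 * Works around the Eclipse compiler (ecj) annotation processing API, which may not return
 * members from {@link Element#getEnclosedElements()} in source declaration order: when running
 * inside ecj, the elements are re-sorted here by their declarationSourceStart, obtained
 * reflectively from the internal JDT bindings.
 */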
public class Workaround {
private static Class<?> sourceTypeBinding;
static {
try {
sourceTypeBinding = Class.forName("org.eclipse.jdt.internal.compiler.lookup.SourceTypeBinding");
} catch (ClassNotFoundException e) {
// ignore
}
}
public static List<? extends Element> getEnclosedElementsDeclarationOrder(Element type) {
try {
Object binding;
try {
binding = field(type, "_binding");
} catch (NoSuchFieldException e) {
// not eclipse, skip workaround
return type.getEnclosedElements();
}
if (sourceTypeBinding == null) {
return type.getEnclosedElements();
}
final List<Object> declarationOrder;
if (sourceTypeBinding.isAssignableFrom(binding.getClass())) {
declarationOrder = findSourceOrder(binding);
} else {
throw new RuntimeException(sourceTypeBinding.getClass() + " not assignable from " + binding.getClass());
}
List<Element> enclosedElements = new ArrayList<Element>(type.getEnclosedElements());
Collections.sort(enclosedElements, new Comparator<Element>() {
public int compare(Element o1, Element o2) {
try {
Object o1Binding = field(o1, "_binding");
Object o2Binding = field(o2, "_binding");
int i1 = declarationOrder.indexOf(o1Binding);
int i2 = declarationOrder.indexOf(o2Binding);
return i1 - i2;
} catch (Exception e) {
return 0;
}
}
});
return enclosedElements;
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static List<Object> findSourceOrder(Object binding) throws SecurityException, IllegalArgumentException, NoSuchFieldException, IllegalAccessException {
Object referenceContext = field(field(binding, "scope"), "referenceContext");
TreeMap<Integer, Object> orderedBindings = new TreeMap<Integer,Object>();
collectSourceOrder(orderedBindings, referenceContext, "methods");
collectSourceOrder(orderedBindings, referenceContext, "fields");
collectSourceOrder(orderedBindings, referenceContext, "memberTypes");
return new ArrayList<Object>(orderedBindings.values());
}
private static void collectSourceOrder(TreeMap<Integer, Object> orderedBindings, Object referenceContext, String fieldName) throws SecurityException, IllegalArgumentException, NoSuchFieldException, IllegalAccessException {
Object[] declarations = (Object[]) field(referenceContext, fieldName);
if (declarations != null) {
for (int i = 0; i < declarations.length; i++) {
Integer declarationSourceStart = (Integer) field(declarations[i], "declarationSourceStart");
orderedBindings.put(declarationSourceStart, field(declarations[i], "binding"));
}
}
}
private static Object field(Object o, String fieldName) throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
if (o == null) {
return null;
}
Field field = o.getClass().getField(fieldName);
field.setAccessible(true);
return field.get(o);
}
}
|
package hudson.plugins.git;
import hudson.MarkupText;
import hudson.model.Hudson;
import hudson.model.User;
import hudson.plugins.git.GitSCM.DescriptorImpl;
import hudson.scm.ChangeLogAnnotator;
import hudson.scm.ChangeLogSet;
import hudson.scm.ChangeLogSet.AffectedFile;
import hudson.scm.EditType;
import org.apache.commons.lang.math.NumberUtils;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import javax.annotation.CheckForNull;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.TimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static hudson.Util.fixEmpty;
import javax.annotation.Nullable;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.ISODateTimeFormat;
/**
* Represents a change set.
* @author Nigel Magnay
*/
public class GitChangeSet extends ChangeLogSet.Entry {
private static final String PREFIX_AUTHOR = "author ";
private static final String PREFIX_COMMITTER = "committer ";
private static final String IDENTITY = "([^<]*)<(.*)> (.*)";
private static final Pattern FILE_LOG_ENTRY = Pattern.compile("^:[0-9]{6} [0-9]{6} ([0-9a-f]{40}) ([0-9a-f]{40}) ([ACDMRTUX])(?>[0-9]+)?\t(.*)$");
private static final Pattern AUTHOR_ENTRY = Pattern.compile("^"
+ PREFIX_AUTHOR + IDENTITY + "$");
private static final Pattern COMMITTER_ENTRY = Pattern.compile("^"
+ PREFIX_COMMITTER + IDENTITY + "$");
private static final Pattern RENAME_SPLIT = Pattern.compile("^(.*?)\t(.*)$");
private static final String NULL_HASH = "0000000000000000000000000000000000000000";
private static final String ISO_8601 = "yyyy-MM-dd'T'HH:mm:ss";
private static final String ISO_8601_WITH_TZ = "yyyy-MM-dd'T'HH:mm:ssX";
private final DateTimeFormatter [] dateFormatters;
public static final Logger LOGGER = Logger.getLogger(GitChangeSet.class.getName());
/**
* This is broken as a part of the 1.5 refactoring.
*
* <p>
* When we build a commit that multiple branches point to, Git plugin historically recorded
* changelogs "revOfBranchInPreviousBuild...revToBuild" for each branch separately. This
* however fails to take full generality of Git commit graph into account, as such rev-lists
* can share common commits, which then get reported multiple times.
*
* <p>
* In Git, a commit doesn't belong to a branch, in the sense that you cannot look at the object graph
* and re-construct exactly how branch has evolved. In that sense, trying to attribute commits to
* branches is a somewhat futile exercise.
*
* <p>
* On the other hand, if this is still deemed important, the right thing to do is to traverse
* the commit graph and see if a commit can be only reachable from the "revOfBranchInPreviousBuild" of
* just one branch, in which case it's safe to attribute the commit to that branch.
*/
private String committer;
private String committerEmail;
private String committerTime;
private String author;
private String authorEmail;
private String authorTime;
private String comment;
private String title;
private String id;
private String parentCommit;
private Collection<Path> paths = new HashSet<>();
private boolean authorOrCommitter;
/**
* Create Git change set using information in given lines
*
* @param lines change set lines read to construct change set
* @param authorOrCommitter if true, use author information (name, time), otherwise use committer information
*/
public GitChangeSet(List<String> lines, boolean authorOrCommitter) {
this.authorOrCommitter = authorOrCommitter;
if (lines.size() > 0) {
parseCommit(lines);
}
// Nearly ISO dates generated by git whatchanged --format=+ci
// Look like '2015-09-30 08:21:24 -0600'
// ISO is '2015-09-30T08:21:24-06:00'
// Uses Builder rather than format pattern for more reliable parsing
DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder();
builder.appendFixedDecimal(DateTimeFieldType.year(), 4);
builder.appendLiteral('-');
builder.appendFixedDecimal(DateTimeFieldType.monthOfYear(), 2);
builder.appendLiteral('-');
builder.appendFixedDecimal(DateTimeFieldType.dayOfMonth(), 2);
builder.appendLiteral(' ');
builder.appendFixedDecimal(DateTimeFieldType.hourOfDay(), 2);
builder.appendLiteral(':');
builder.appendFixedDecimal(DateTimeFieldType.minuteOfHour(), 2);
builder.appendLiteral(':');
builder.appendFixedDecimal(DateTimeFieldType.secondOfMinute(), 2);
builder.appendLiteral(' ');
builder.appendTimeZoneOffset(null, false, 2, 2);
DateTimeFormatter gitDateFormatter = builder.toFormatter();
// DateTimeFormat.forPattern("yyyy-MM-DDTHH:mm:ssZ");
// 2013-03-21T15:16:44+0100
// Uses Builder rather than format pattern for more reliable parsing
builder = new DateTimeFormatterBuilder();
builder.appendFixedDecimal(DateTimeFieldType.year(), 4);
builder.appendLiteral('-');
builder.appendFixedDecimal(DateTimeFieldType.monthOfYear(), 2);
builder.appendLiteral('-');
builder.appendFixedDecimal(DateTimeFieldType.dayOfMonth(), 2);
builder.appendLiteral('T');
builder.appendFixedDecimal(DateTimeFieldType.hourOfDay(), 2);
builder.appendLiteral(':');
builder.appendFixedDecimal(DateTimeFieldType.minuteOfHour(), 2);
builder.appendLiteral(':');
builder.appendFixedDecimal(DateTimeFieldType.secondOfMinute(), 2);
builder.appendTimeZoneOffset(null, false, 2, 2);
DateTimeFormatter nearlyISOFormatter = builder.toFormatter();
DateTimeFormatter isoDateFormat = ISODateTimeFormat.basicDateTimeNoMillis();
dateFormatters = new DateTimeFormatter[3];
dateFormatters[0] = gitDateFormatter; // First priority +%cI format
dateFormatters[1] = nearlyISOFormatter; // Second priority seen in git-plugin
dateFormatters[2] = isoDateFormat; // Third priority, ISO 8601 format
}
private void parseCommit(List<String> lines) {
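// Parses the raw changelog lines produced by git whatchanged: the header lines
// (commit/tree/parent/author/committer), the indented commit message, and the
// ":<mode> <mode> <sha> <sha> <action>\t<path>" file entries.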
StringBuilder message = new StringBuilder();
for (String line : lines) {
if( line.length() < 1)
continue;
if (line.startsWith("commit ")) {
String[] split = line.split(" ");
if (split.length > 1) this.id = split[1];
else throw new IllegalArgumentException("Commit has no ID: " + lines);
} else if (line.startsWith("tree ")) {
} else if (line.startsWith("parent ")) {
String[] split = line.split(" ");
// parent may be null for initial commit or changelog computed from a shallow clone
if (split.length > 1) this.parentCommit = split[1];
} else if (line.startsWith(PREFIX_COMMITTER)) {
Matcher committerMatcher = COMMITTER_ENTRY.matcher(line);
if (committerMatcher.matches()
&& committerMatcher.groupCount() >= 3) {
this.committer = committerMatcher.group(1).trim();
this.committerEmail = committerMatcher.group(2);
this.committerTime = isoDateFormat(committerMatcher.group(3));
}
} else if (line.startsWith(PREFIX_AUTHOR)) {
Matcher authorMatcher = AUTHOR_ENTRY.matcher(line);
if (authorMatcher.matches() && authorMatcher.groupCount() >= 3) {
this.author = authorMatcher.group(1).trim();
this.authorEmail = authorMatcher.group(2);
this.authorTime = isoDateFormat(authorMatcher.group(3));
}
} else if (line.startsWith(" ")) {
message.append(line.substring(4)).append('\n');
} else if (':' == line.charAt(0)) {
Matcher fileMatcher = FILE_LOG_ENTRY.matcher(line);
if (fileMatcher.matches() && fileMatcher.groupCount() >= 4) {
String mode = fileMatcher.group(3);
if (mode.length() == 1) {
String src = null;
String dst = null;
String path = fileMatcher.group(4);
char editMode = mode.charAt(0);
if (editMode == 'M' || editMode == 'A' || editMode == 'D'
|| editMode == 'R' || editMode == 'C') {
src = parseHash(fileMatcher.group(1));
dst = parseHash(fileMatcher.group(2));
}
// Handle rename as two operations - a delete and an add
if (editMode == 'R') {
Matcher renameSplitMatcher = RENAME_SPLIT.matcher(path);
if (renameSplitMatcher.matches() && renameSplitMatcher.groupCount() >= 2) {
String oldPath = renameSplitMatcher.group(1);
String newPath = renameSplitMatcher.group(2);
this.paths.add(new Path(src, dst, 'D', oldPath, this));
this.paths.add(new Path(src, dst, 'A', newPath, this));
}
}
// Handle copy as an add
else if (editMode == 'C') {
Matcher copySplitMatcher = RENAME_SPLIT.matcher(path);
if (copySplitMatcher.matches() && copySplitMatcher.groupCount() >= 2) {
String newPath = copySplitMatcher.group(2);
this.paths.add(new Path(src, dst, 'A', newPath, this));
}
}
else {
this.paths.add(new Path(src, dst, editMode, path, this));
}
}
}
}
}
this.comment = message.toString();
int endOfFirstLine = this.comment.indexOf('\n');
if (endOfFirstLine == -1) {
this.title = this.comment;
} else {
this.title = this.comment.substring(0, endOfFirstLine);
}
}
/** Convert to iso date format if required */
private String isoDateFormat(String s) {
String date = s;
String timezone = "Z";
int spaceIndex = s.indexOf(' ');
if (spaceIndex > 0) {
date = s.substring(0, spaceIndex);
timezone = s.substring(spaceIndex+1);
}
if (NumberUtils.isDigits(date)) {
// legacy mode
long time = Long.parseLong(date);
DateFormat formatter = new SimpleDateFormat(ISO_8601);
formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
return formatter.format(new Date(time * 1000)) + timezone;
} else {
// already in ISO format
return s;
}
}
private String parseHash(String hash) {
return NULL_HASH.equals(hash) ? null : hash;
}
@Exported
public String getDate() {
return authorOrCommitter ? authorTime : committerTime;
}
@Exported
public String getAuthorEmail() {
return authorOrCommitter ? authorEmail : committerEmail;
}
@Override
public long getTimestamp() {
String date = getDate();
if (date == null) {
LOGGER.log(Level.WARNING, "Failed to parse null date");
return -1;
}
if (date.isEmpty()) {
LOGGER.log(Level.WARNING, "Failed to parse empty date");
return -1;
}
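// Try the Joda formatters first; fall back to SimpleDateFormat with an "X" offset, and finally
// to a Java 6 compatible pattern ("Z" offset, colon stripped from the zone) because the "X"
// pattern letter is only understood by SimpleDateFormat on Java 7 and later.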
for (DateTimeFormatter dateFormatter : dateFormatters) {
try {
DateTime dateTime = DateTime.parse(date, dateFormatter);
return dateTime.getMillis();
} catch (IllegalArgumentException ia) {
}
}
try {
return new SimpleDateFormat(ISO_8601_WITH_TZ).parse(date).getTime();
} catch (ParseException e) {
return -1;
} catch (IllegalArgumentException ia) {
final String java6FormatDef = ISO_8601_WITH_TZ.replace("X", "Z");
final String java6Date = getDate().replaceAll(":(\\d\\d)$", "$1");
try {
return new SimpleDateFormat(java6FormatDef).parse(java6Date).getTime();
} catch (ParseException e) {
return -1;
}
}
}
@Override
public String getCommitId() {
return id;
}
@Override
public void setParent(ChangeLogSet parent) {
super.setParent(parent);
}
public @CheckForNull
String getParentCommit() {
return parentCommit;
}
@Override
public Collection<String> getAffectedPaths() {
Collection<String> affectedPaths = new HashSet<>(this.paths.size());
for (Path file : this.paths) {
affectedPaths.add(file.getPath());
}
return affectedPaths;
}
/**
* Gets the files that are changed in this commit.
* @return
* can be empty but never null.
*/
@Exported
public Collection<Path> getPaths() {
return paths;
}
@Override
public Collection<Path> getAffectedFiles() {
return this.paths;
}
/**
* Returns user of the change set.
*
* @param csAuthor user name.
* @param csAuthorEmail user email.
* @param createAccountBasedOnEmail true if create new user based on committer's email.
* @return {@link User}
*/
public User findOrCreateUser(String csAuthor, String csAuthorEmail, boolean createAccountBasedOnEmail) {
User user;
if (csAuthor == null) {
return User.getUnknown();
}
if (createAccountBasedOnEmail) {
user = User.get(csAuthorEmail, false);
if (user == null) {
try {
user = User.get(csAuthorEmail, true);
user.setFullName(csAuthor);
if (hasHudsonTasksMailer())
setMail(user, csAuthorEmail);
user.save();
} catch (IOException e) {
// add logging statement?
}
}
} else {
user = User.get(csAuthor, false);
if (user == null) {
// Ensure that malformed email addresses (in this case, just '@')
// don't mess us up.
String[] emailParts = csAuthorEmail.split("@");
if (emailParts.length > 0) {
user = User.get(emailParts[0], true);
} else {
return User.getUnknown();
}
}
}
// set email address for user if none is already available
if (fixEmpty(csAuthorEmail) != null && hasHudsonTasksMailer() && !hasMail(user)) {
try {
setMail(user, csAuthorEmail);
} catch (IOException e) {
// ignore
}
}
return user;
}
// TODO 1.651.2+ replace by API method
@Nullable
private static User getById(String id, boolean create) {
try {
return (User) User.class.getMethod("getById", String.class, boolean.class).invoke(null, id, create);
} catch (NoSuchMethodException x) {
// fine, 1.651.1 or earlier
} catch (Exception x) {
LOGGER.log(Level.WARNING, null, x);
}
return User.get(id, create);
}
private void setMail(User user, String csAuthorEmail) throws IOException {
user.addProperty(new hudson.tasks.Mailer.UserProperty(csAuthorEmail));
}
private boolean hasMail(User user) {
hudson.tasks.Mailer.UserProperty property = user.getProperty(hudson.tasks.Mailer.UserProperty.class);
return property != null && property.hasExplicitlyConfiguredAddress();
}
private boolean hasHudsonTasksMailer() {
// TODO convert to checking for mailer plugin as plugin migrates to 1.509+
try {
Class.forName("hudson.tasks.Mailer");
return true;
} catch (ClassNotFoundException e) {
return false;
}
}
private boolean isCreateAccountBasedOnEmail() {
Hudson hudson = Hudson.getInstance();
if (hudson == null) {
return false;
}
DescriptorImpl descriptor = (DescriptorImpl) hudson.getDescriptor(GitSCM.class);
return descriptor.isCreateAccountBasedOnEmail();
}
@Override
@Exported
public User getAuthor() {
String csAuthor;
String csAuthorEmail;
// If true, use the author field from git log rather than the committer.
if (authorOrCommitter) {
csAuthor = this.author;
csAuthorEmail = this.authorEmail;
}
else {
csAuthor = this.committer;
csAuthorEmail = this.committerEmail;
}
return findOrCreateUser(csAuthor, csAuthorEmail, isCreateAccountBasedOnEmail());
}
/**
* Gets the author name for this changeset - note that this is mainly here
* so that we can test authorOrCommitter without needing a fully instantiated
* Hudson (which is needed for User.get in getAuthor()).
*
* @return author name
*/
public String getAuthorName() {
// If true, use the author field from git log rather than the committer.
String csAuthor = authorOrCommitter ? author : committer;
return csAuthor;
}
@Override
@Exported
public String getMsg() {
return this.title;
}
@Exported
public String getId() {
return this.id;
}
public String getRevision() {
return this.id;
}
@Exported
public String getComment() {
return this.comment;
}
/**
* Gets {@linkplain #getComment() the comment} fully marked up by {@link ChangeLogAnnotator}.
* @return annotated comment
*/
public String getCommentAnnotated() {
MarkupText markup = new MarkupText(getComment());
for (ChangeLogAnnotator a : ChangeLogAnnotator.all())
a.annotate(getParent().getRun(), this, markup);
return markup.toString(false);
}
public String getBranch() {
return null;
}
@ExportedBean(defaultVisibility=999)
public static class Path implements AffectedFile {
private String src;
private String dst;
private char action;
private String path;
private GitChangeSet changeSet;
private Path(String source, String destination, char action, String filePath, GitChangeSet changeSet) {
this.src = source;
this.dst = destination;
this.action = action;
this.path = filePath;
this.changeSet = changeSet;
}
public String getSrc() {
return src;
}
public String getDst() {
return dst;
}
@Exported(name="file")
public String getPath() {
return path;
}
public GitChangeSet getChangeSet() {
return changeSet;
}
@Exported
public EditType getEditType() {
switch (action) {
case 'A':
return EditType.ADD;
case 'D':
return EditType.DELETE;
default:
return EditType.EDIT;
}
}
}
public int hashCode() {
return id != null ? id.hashCode() : super.hashCode();
}
public boolean equals(Object obj) {
if (obj == this)
return true;
if (obj instanceof GitChangeSet)
return id != null && id.equals(((GitChangeSet) obj).id);
return false;
}
}
|
package innovimax.mixthem.io;
import java.io.IOException;
/**
* This interface provides for reading characters from a character input.
* @author Innovimax
* @version 1.0
*/
public interface IInputChar {
/**
* Returns true if there are more characters.
* @return true if there are more characters
* @throws IOException - If an I/O error occurs
*/
boolean hasCharacter() throws IOException;
/**
* Reads a character, or returns null if there are no more characters.
* @return The character as a Character, or null if there are no more characters
* @throws IOException - If an I/O error occurs
*/
Character nextCharacter() throws IOException;
/**
* Reads characters into a portion of an array.
* @param buffer Destination buffer
* @param len - Maximum number of characters to read
* @return The number of characters read, or -1 if there are no more characters
* @throws IOException - If an I/O error occurs
*/
int nextCharacters(char[] buffer, int len) throws IOException;
/**
* Closes this input and releases any system resources associated with it.
* @throws IOException - If an I/O error occurs
*/
void close() throws IOException;
}
|
package io.xrates.backend;
import io.xrates.backend.datamodel.dao.UserDao;
import io.xrates.backend.exceptions.RateProviderException;
import io.xrates.backend.ratecheck.BusinessLogic;
import io.xrates.backend.rateprovider.IRateProvider;
import java.io.IOException;
import java.util.Currency;
import java.util.List;
import javax.transaction.Transactional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
@Component
public class MainScheduler {
Logger log = LoggerFactory.getLogger(MainScheduler.class.getName());
@Autowired
private List<IRateProvider> rateProviders;
@Autowired
private UserDao userDao;
@Autowired
private BusinessLogic bl;
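// Spring cron (second minute hour day month weekday): run at minute 15 of every hour
// between 09:00 and 18:00, Monday through Friday.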
@Scheduled(cron="0 15 9-18 * * MON-FRI")
@Transactional
public void getRate() throws IOException {
log.debug("Running scheduler");
for (int i = 0; i < this.rateProviders.size(); i++) {
try {
rateProviders.get(i).update();
log.info(rateProviders.get(i).getRateProvider().getProviderName() +
" : For 1 "+Currency.getInstance("SGD").getDisplayName() +
" you get " + String.valueOf(
this.rateProviders.get(i).convert(Currency.getInstance("SGD"),
Currency.getInstance("INR")))
+ " " + Currency.getInstance("INR").getDisplayName());
} catch (RateProviderException e) {
log.error("Error in " + rateProviders.get(i).getClass() + ": " + e.getMessage());
}
}
bl.process();
bl.notifyAlerts();
}
}
|
package istc.bigdawg.monitoring;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import istc.bigdawg.BDConstants;
import istc.bigdawg.exceptions.MigrationException;
import istc.bigdawg.exceptions.NotSupportIslandException;
import istc.bigdawg.executor.Executor;
import istc.bigdawg.executor.plan.QueryExecutionPlan;
import istc.bigdawg.migration.MigrationStatistics;
import istc.bigdawg.packages.CrossIslandQueryNode;
import istc.bigdawg.packages.CrossIslandQueryPlan;
import istc.bigdawg.parsers.UserQueryParser;
import istc.bigdawg.postgresql.PostgreSQLHandler;
import istc.bigdawg.query.ConnectionInfo;
import istc.bigdawg.query.ConnectionInfoParser;
import istc.bigdawg.signature.Signature;
import org.apache.log4j.Logger;
import org.mortbay.log.Log;
public class Monitor {
private static Logger logger = Logger.getLogger(Monitor.class.getName());
public static final String stringSeparator = "****";
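// The INSERT below uses SELECT ... WHERE NOT EXISTS so that a (signature, index) pair is only
// stored once; lastRan and duration start at -1, which allQueriesDone() treats as "not yet run".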
private static final String INSERT = "INSERT INTO monitoring (signature, index, lastRan, duration) SELECT '%s', %d, -1, -1 WHERE NOT EXISTS (SELECT 1 FROM monitoring WHERE signature='%s' AND index=%d)";
private static final String DELETE = "DELETE FROM monitoring WHERE signature='%s'";
private static final String UPDATE = "UPDATE monitoring SET lastRan=%d, duration=%d WHERE signature='%s' AND index=%d";
private static final String RETRIEVE = "SELECT duration FROM monitoring WHERE signature='%s' ORDER BY index";
private static final String SIGS = "SELECT DISTINCT(signature) FROM monitoring";
private static final String MINDURATION = "SELECT min(duration) FROM monitoring";
private static final String MIGRATE = "INSERT INTO migrationstats(fromLoc, toLoc, objectFrom, objectTo, startTime, endTime, countExtracted, countLoaded, message) VALUES ('%s', '%s', '%s', '%s', %d, %d, %d, %d, '%s')";
private static final String RETRIEVEMIGRATE = "SELECT objectFrom, objectTo, startTime, endTime, countExtracted, countLoaded, message FROM migrationstats WHERE fromLoc='%s' AND toLoc='%s'";
public static boolean addBenchmarks(Signature signature, boolean lean) throws Exception {
BDConstants.Shim[] shims = BDConstants.Shim.values();
return addBenchmarks(signature, lean, shims);
}
public static boolean addBenchmarks(Signature signature, boolean lean, BDConstants.Shim[] shims) throws Exception {
LinkedHashMap<String, String> crossIslandQuery = UserQueryParser.getUnwrappedQueriesByIslands(signature.getQuery());
logger.debug("Query for signature: " + signature.getQuery());
CrossIslandQueryPlan ciqp = new CrossIslandQueryPlan(crossIslandQuery);
CrossIslandQueryNode ciqn = ciqp.getRoot();
List<QueryExecutionPlan> qeps = ciqn.getAllQEPs(true);
for (int i = 0; i < qeps.size(); i++){
try {
if (!insert(signature, i)) {
return false;
}
} catch (NotSupportIslandException e) {
e.printStackTrace();
}
}
if (!lean) {
try {
runBenchmarks(qeps, signature);
} catch (Exception e) {
e.printStackTrace();
}
}
return true;
}
public static boolean removeBenchmarks(Signature signature) {
return delete(signature);
}
public static boolean allQueriesDone() {
PostgreSQLHandler handler = new PostgreSQLHandler();
try {
PostgreSQLHandler.QueryResult qresult = handler.executeQueryPostgreSQL(MINDURATION);
List<List<String>> rows = qresult.getRows();
long minDuration = Long.MAX_VALUE;
for (List<String> row: rows){
long currentDuration = Long.parseLong(row.get(0));
if (currentDuration < minDuration){
minDuration = currentDuration;
}
}
if (minDuration < 0){
return false;
}
} catch (SQLException e) {
e.printStackTrace();
}
return true;
}
public static List<Long> getBenchmarkPerformance(Signature signature) throws NotSupportIslandException, SQLException {
List<Long> perfInfo = new ArrayList<>();
String escapedSignature = signature.toRecoverableString().replace("'", stringSeparator);
PostgreSQLHandler handler = new PostgreSQLHandler();
PostgreSQLHandler.QueryResult qresult = handler.executeQueryPostgreSQL(String.format(RETRIEVE, escapedSignature));
List<List<String>> rows = qresult.getRows();
for (List<String> row: rows){
long currentDuration = Long.parseLong(row.get(0));
if (currentDuration >= 0){
perfInfo.add(currentDuration);
} else {
perfInfo.add(Long.MAX_VALUE);
}
}
System.out.printf("[BigDAWG] MONITOR: Performance information generated.\n");
return perfInfo;
}
public static List<Signature> getAllSignatures() {
List<Signature> signatures = new ArrayList<>();
PostgreSQLHandler handler = new PostgreSQLHandler();
try {
PostgreSQLHandler.QueryResult qresult = handler.executeQueryPostgreSQL(SIGS);
List<List<String>> rows = qresult.getRows();
for (List<String> row: rows){
String signature = row.get(0).replace(stringSeparator, "'");
signatures.add(new Signature(signature));
}
} catch (Exception e) {
Log.debug(e.getMessage());
e.printStackTrace();
}
return signatures;
}
public static Signature getClosestSignature(Signature signature) {
// TODO This needs to be changed to be much more efficient.
// We need a way to do similarity in postgres (likely indexing on signature)
// based on the dimensions we care about
List<Signature> signatures = getAllSignatures();
Signature closest = null;
double distance = Double.MAX_VALUE;
for (Signature current: signatures){
// compare them and pick the closest Signature
double curDist = signature.compare(current);
if (curDist < distance){
distance = curDist;
closest = current;
}
}
return closest;
}
private static boolean insert(Signature signature, int index) throws NotSupportIslandException {
PostgreSQLHandler handler = new PostgreSQLHandler();
try {
String escapedSignature = signature.toRecoverableString().replace("'", stringSeparator);
handler.executeStatementPostgreSQL(String.format(INSERT, escapedSignature, index, escapedSignature, index));
return true;
} catch (SQLException e) {
return false;
}
}
private static boolean delete(Signature signature) {
PostgreSQLHandler handler = new PostgreSQLHandler();
try {
String escapedSignature = signature.toRecoverableString().replace("'", stringSeparator);
handler.executeStatementPostgreSQL(String.format(DELETE, escapedSignature));
return true;
} catch (SQLException e) {
return false;
}
}
public static void runBenchmarks(List<QueryExecutionPlan> qeps, Signature signature) throws SQLException, MigrationException {
for (int i = 0; i < qeps.size(); i++){
Executor.executePlan(qeps.get(i), signature, i);
}
}
public void finishedBenchmark(Signature signature, int index, long startTime, long endTime) throws SQLException {
PostgreSQLHandler handler = new PostgreSQLHandler();
String escapedSignature = signature.toRecoverableString().replace("'", stringSeparator);
handler.executeStatementPostgreSQL(String.format(UPDATE, endTime, endTime-startTime, escapedSignature, index));
// Only for testing purposes. Uncomment when necessary.
/* try {
File temp = File.createTempFile("queries", ".tmp");
BufferedWriter bw = new BufferedWriter(new FileWriter(temp,true));
bw.write(String.format("%d %s %s\n", endTime-startTime, qep.getIsland(), qepString));
bw.close();
} catch (IOException e) {
e.printStackTrace();
}*/
}
public static void addMigrationStats(MigrationStatistics stats) throws SQLException {
PostgreSQLHandler handler = new PostgreSQLHandler();
String fromLoc = ConnectionInfoParser.connectionInfoToString(stats.getConnectionFrom());
String toLoc = ConnectionInfoParser.connectionInfoToString(stats.getConnectionTo());
long countExtracted = -1;
long countLoaded = -1;
if (stats.getCountExtractedElements() != null){
countExtracted = stats.getCountExtractedElements();
}
if (stats.getCountLoadedElements() != null){
countLoaded = stats.getCountLoadedElements();
}
handler.executeStatementPostgreSQL(String.format(MIGRATE, fromLoc, toLoc, stats.getObjectFrom(), stats.getObjectTo(), stats.getStartTimeMigration(), stats.getEndTimeMigration(), countExtracted, countLoaded, stats.getMessage()));
}
public List<MigrationStatistics> getMigrationStats(ConnectionInfo from, ConnectionInfo to) throws SQLException {
String fromLoc = ConnectionInfoParser.connectionInfoToString(from);
String toLoc = ConnectionInfoParser.connectionInfoToString(to);
PostgreSQLHandler handler = new PostgreSQLHandler();
PostgreSQLHandler.QueryResult qresult = handler.executeQueryPostgreSQL(String.format(RETRIEVEMIGRATE, fromLoc, toLoc));
List<MigrationStatistics> results = new ArrayList<>();
List<List<String>> rows = qresult.getRows();
for (List<String> row: rows){
String objectFrom = row.get(0);
String objectTo = row.get(1);
long startTime = Long.parseLong(row.get(2));
long endTime = Long.parseLong(row.get(3));
long countExtracted = Long.parseLong(row.get(4));
Long countExtractedElements = null;
if (countExtracted >= 0) {
countExtractedElements = countExtracted;
}
long countLoaded = Long.parseLong(row.get(5));
Long countLoadedElements = null;
if (countLoaded >= 0) {
countLoadedElements = countLoaded;
}
String message = row.get(6);
results.add(new MigrationStatistics(from, to, objectFrom, objectTo, startTime, endTime, countExtractedElements, countLoadedElements, message));
}
return results;
}
}
|
package me.winter.boing.util;
import com.badlogic.gdx.math.Vector2;
import static java.lang.Math.signum;
public class VectorUtil
{
public static final Vector2 UP = new Vector2(0, 1);
public static final Vector2 DOWN = new Vector2(0, -1);
public static final Vector2 LEFT = new Vector2(-1, 0);
public static final Vector2 RIGHT = new Vector2(1, 0);
public static final Vector2 UPLEFT = new Vector2(-1, 1).nor();
public static final Vector2 DOWNLEFT = new Vector2(-1, -1).nor();
public static final Vector2 UPRIGHT = new Vector2(1, 1).nor();
public static final Vector2 DOWNRIGHT = new Vector2(1, -1).nor();
private VectorUtil() {}
public static Vector2 divide(Vector2 vec, float scalar)
{
vec.x /= scalar;
vec.y /= scalar;
return vec;
}
public static Vector2 divide(Vector2 vec, float x, float y)
{
vec.x /= x;
vec.y /= y;
return vec;
}
public static Vector2 divide(Vector2 vec, Vector2 divider)
{
vec.x /= divider.x;
vec.y /= divider.y;
return vec;
}
public static Vector2 append(Vector2 toAppend, Vector2 other)
{
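// Component-wise append: other's component is added only when it has the same sign as the
// corresponding component of toAppend, so the addition never cancels the original vector.
// Note that signum(0) == 0, so a zero component only accepts another zero.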
if(signum(toAppend.x) == signum(other.x))
toAppend.x += other.x;
if(signum(toAppend.y) == signum(other.y))
toAppend.y += other.y;
return toAppend;
}
}
|
package mnm.mods.tabbychat.gui;
import java.awt.Dimension;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.TimeUnit;
import mnm.mods.tabbychat.TabbyChat;
import mnm.mods.tabbychat.api.Channel;
import mnm.mods.tabbychat.api.Message;
import mnm.mods.tabbychat.api.TabbyAPI;
import mnm.mods.tabbychat.core.GuiNewChatTC;
import mnm.mods.tabbychat.util.ChatTextUtils;
import mnm.mods.util.Color;
import mnm.mods.util.gui.GuiComponent;
import mnm.mods.util.gui.events.GuiMouseAdapter;
import mnm.mods.util.gui.events.GuiMouseEvent;
import net.minecraft.client.gui.Gui;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.gui.GuiUtilRenderComponents;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.entity.player.EntityPlayer.EnumChatVisibility;
import net.minecraft.util.ChatComponentText;
import net.minecraft.util.IChatComponent;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.Lists;
public class ChatArea extends GuiComponent implements Supplier<List<Message>>, GuiMouseAdapter {
private Supplier<List<Message>> supplier = Suppliers.memoizeWithExpiration(this, 50,
TimeUnit.MILLISECONDS);
private int scrollPos = 0;
public ChatArea() {
this.setMinimumSize(new Dimension(300, 160));
}
@Override
public void accept(GuiMouseEvent event) {
if (event.event == GuiMouseEvent.SCROLLED) {
// Scrolling
int scroll = event.scroll;
// One tick = 120
int div = 60;
if (GuiScreen.isShiftKeyDown()) {
div *= 3;
}
scroll(scroll / div);
}
}
@Override
public void onClosed() {
resetScroll();
super.onClosed();
}
@Override
public void drawComponent(int mouseX, int mouseY) {
if (mc.gameSettings.chatVisibility != EnumChatVisibility.HIDDEN) {
List<Message> visible = getChat(false);
int height = visible.size() * mc.fontRendererObj.FONT_HEIGHT;
if (GuiNewChatTC.getInstance().getChatOpen()) {
Gui.drawRect(0, 0, getBounds().width, getBounds().height,
getBackColor());
drawBorders(0, 0, getBounds().width, getBounds().height);
} else if (height != 0) {
int y = getBounds().height - height;
Gui.drawRect(getBounds().x, y - 1, getBounds().width, y + height, getBackColor());
drawBorders(getBounds().x, y - 1, getBounds().width, y + height);
}
int xPos = getBounds().x + 1;
int yPos = getBounds().height;
for (Message line : visible) {
yPos -= mc.fontRendererObj.FONT_HEIGHT;
drawChatLine(line, xPos, yPos);
}
}
}
private void drawChatLine(Message line, int xPos, int yPos) {
GlStateManager.enableBlend();
String text = line.getMessageWithOptionalTimestamp().getFormattedText();
mc.fontRendererObj.drawStringWithShadow(text, xPos, yPos, (getForeColor())
+ (getLineOpacity(line) << 24));
GlStateManager.disableAlpha();
GlStateManager.disableBlend();
}
@Override
public List<Message> get() {
return getChat(true);
}
public List<Message> getChat(boolean force) {
if (!force) {
return supplier.get();
}
return getChat();
}
private List<Message> getChat() {
Channel channel = TabbyAPI.getAPI().getChat().getActiveChannel();
List<Message> messages = Lists.newArrayList();
List<Message> lines = ChatTextUtils.split(channel.getMessages(), getBounds().width);
int length = 0;
this.scrollPos = Math.min(this.scrollPos, lines.size()
- (getBounds().height / mc.fontRendererObj.FONT_HEIGHT));
this.scrollPos = Math.max(this.scrollPos, 0);
int pos = scrollPos;
// TODO Setting
int div = GuiNewChatTC.getInstance().getChatOpen() ? 1 : 2;
while (pos < lines.size() && length < getBounds().height / div - 8) {
Message line = lines.get(pos);
if (GuiNewChatTC.getInstance().getChatOpen()) {
messages.add(line);
} else if (getLineOpacity(line) > 3) {
messages.add(line);
} else {
break;
}
pos++;
length += mc.fontRendererObj.FONT_HEIGHT;
}
return messages;
}
private int getLineOpacity(Message line) {
int opacity = new Color(getForeColor()).getAlpha();
double age = mc.ingameGUI.getUpdateCounter() - line.getCounter();
if (!mc.ingameGUI.getChatGUI().getChatOpen()) {
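// When chat is closed, fade the line out over the configured fadeTime: full opacity for
// the first 90% of that time, then a quadratic falloff to zero over the last 10%.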
double opacPerc = age / TabbyChat.getInstance().chatSettings.fadeTime.getValue();
opacPerc = 1.0D - opacPerc;
opacPerc *= 10.0D;
opacPerc = Math.max(0, opacPerc);
opacPerc = Math.min(1, opacPerc);
opacPerc *= opacPerc;
opacity = (int) (opacity * opacPerc);
}
return opacity;
}
public void scroll(int scr) {
this.scrollPos += scr;
}
public void resetScroll() {
this.scrollPos = 0;
}
public IChatComponent getChatComponent(int clickX, int clickY) {
IChatComponent result = null;
if (GuiNewChatTC.getInstance().getChatOpen()) {
if (clickX >= getActualPosition().x && clickY >= getActualPosition().y
&& clickX <= getActualPosition().x + getBounds().width
&& clickY <= getActualPosition().y + getBounds().height) {
int bottom = getActualPosition().y + getBounds().height;
int linePos = Math
.abs((clickY - getBounds().height - getActualPosition().y - (bottom % mc.fontRendererObj.FONT_HEIGHT))
/ (this.mc.fontRendererObj.FONT_HEIGHT) + 1);
if (linePos >= 0 && linePos < this.getChat(false).size()) {
Message chatline = getChat(false).get(linePos);
int l1 = 0;
Iterator<IChatComponent> iterator = chatline.getMessageWithOptionalTimestamp().iterator();
while (iterator.hasNext() && l1 <= clickX) {
IChatComponent ichatcomponent = iterator.next();
if (ichatcomponent instanceof ChatComponentText) {
l1 += this.mc.fontRendererObj.getStringWidth(GuiUtilRenderComponents
.func_178909_a(((ChatComponentText) ichatcomponent)
.getChatComponentText_TextValue(), true));
result = ichatcomponent;
}
}
}
}
}
return result;
}
public float getChatScale() {
return this.mc.gameSettings.chatScale;
}
}
|
package net.etalia.cron;
import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class ScheduledImport {
private static final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
public static final String PROP_FILES_DIR = "directory";
public static final String PROP_FILE_FORMAT = "format";
public static final String PROP_USER = "email";
public static final String PROP_PASSWORD = "password";
public static final String PROP_PUBLICATION = "publication";
public static final String PROP_STAMP = "stamp";
// BASE URL
private static Properties properties;
private static String propertyFile;
public static String getProperty(String name) {
if (properties == null) {
properties = new Properties();
try {
properties.load(new FileInputStream(propertyFile));
} catch (Exception e) {
e.printStackTrace();
}
}
return properties.getProperty(name);
}
public static void main(String[] args) {
if (args.length != 0) {
propertyFile = args[0];
} else {
propertyFile = System.getProperty("user.dir") + File.separator + ".etalia-import.properties";
}
String dir = getProperty(PROP_FILES_DIR) != null ? getProperty(PROP_FILES_DIR) : System.getProperty("user.dir");
ScheduledJob job = new ScheduledJob(dir);
scheduler.scheduleWithFixedDelay(job, 0, 60, TimeUnit.SECONDS);
}
}
|
package org.basex.api.jaxrx;
import static org.basex.core.Text.*;
import org.basex.BaseXServer;
import org.basex.core.Prop;
import org.basex.core.Text;
import org.basex.util.Args;
import org.basex.util.Util;
import org.jaxrx.JettyServer;
public final class JaxRxServer extends BaseXServer {
/** JAX-RX String. */
private static final String JAXRX = "JAX-RX";
/** Jetty server. */
private JettyServer jetty;
/**
* Main method, launching the JAX-RX implementation.
* @param args command-line arguments
*/
public static void main(final String[] args) {
new JaxRxServer(args);
}
/**
* Constructor.
* @param args command-line arguments
*/
public JaxRxServer(final String... args) {
super(args);
if(!success || service) return;
// set default ports and paths
set(BXJaxRx.JAXRXPATH, context.prop.get(Prop.JAXRXPATH), false);
set(BXJaxRx.SERVERPORT, context.prop.num(Prop.SERVERPORT), false);
set(BXJaxRx.SERIALIZER, context.prop.get(Prop.SERIALIZER), false);
// store configuration in system properties
// if a property has already been set, the new settings will be ignored
// set user (use 'admin' as default)
final boolean user = System.getProperty(BXJaxRx.USER) != null;
if(!user) set(BXJaxRx.USER, Text.ADMIN, false);
// set password (use 'admin' as default, or request on command line)
final String pass = System.getProperty(BXJaxRx.PASSWORD);
String p = pass != null ? pass : user ? null : Text.ADMIN;
while(p == null) {
Util.out(SERVERPW + COLS);
p = password();
}
set(BXJaxRx.PASSWORD, p, false);
// define path and name of the JAX-RX implementation.
set("org.jaxrx.systemName", Text.NAMELC, false);
set("org.jaxrx.systemPath", BXJaxRx.class.getName(), false);
// start Jetty server (if not done yet)
try {
jetty = new JettyServer(context.prop.num(Prop.JAXRXPORT));
Util.outln(JAXRX + ' ' + SERVERSTART);
} catch(final Exception ex) {
Util.server(ex);
}
}
@Override
public void quit(final boolean user) {
super.quit(user);
if(jetty != null) jetty.stop();
}
/**
* Sets the specified value if property has not been set yet.
* @param key property key
* @param value property value
* @param force force setting
*/
private void set(final String key, final Object value, final boolean force) {
if(force || System.getProperty(key) == null) {
System.setProperty(key, value.toString());
}
}
@Override
public boolean parseArguments(final String[] args) {
final Args arg = new Args(args, this, JAXRXINFO, Util.info(CONSOLE, JAXRX));
boolean daemon = false;
final StringBuilder serial = new StringBuilder();
while(arg.more()) {
if(arg.dash()) {
final char c = arg.next();
if(c == 'D') {
// hidden flag: daemon mode
daemon = true;
} else if(c == 'j') {
// parse JAX-RX server port
context.prop.set(Prop.JAXRXPORT, arg.num());
} else if(c == 'p') {
// parse server port
set(BXJaxRx.SERVERPORT, arg.num(), true);
} else if(c == 'P') {
// specify password
set(BXJaxRx.PASSWORD, arg.string(), true);
} else if(c == 's') {
// set service flag
service = !daemon;
} else if(c == 'S') {
// set/add serialization parameter
if(serial.length() != 0) serial.append(',');
serial.append(arg);
set(BXJaxRx.SERIALIZER, serial, true);
} else if(c == 'U') {
// specify user name
set(BXJaxRx.USER, arg.string(), true);
} else if(c == 'z') {
// suppress logging
quiet = true;
} else {
arg.check(false);
}
} else {
arg.check(false);
if(arg.string().equalsIgnoreCase("stop")) {
stop(context.prop.num(Prop.SERVERPORT));
return false;
}
}
}
return arg.finish();
}
}
|
package org.gestern.gringotts;
import static org.gestern.gringotts.dependency.Dependency.DEP;
import java.io.IOException;
import java.util.logging.Logger;
import net.milkbowl.vault.economy.Economy;
import org.bukkit.command.CommandExecutor;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.ServicePriority;
import org.bukkit.plugin.ServicesManager;
import org.bukkit.plugin.java.JavaPlugin;
import org.gestern.gringotts.api.impl.VaultConnector;
import org.mcstats.MetricsLite;
import static org.gestern.gringotts.Configuration.CONF;
public class Gringotts extends JavaPlugin {
/** The Gringotts plugin instance. */
public static Gringotts G;
private Logger log;
private Commands gcommand;
/** Manages accounts. */
public Accounting accounting;
@Override
public void onEnable() {
G = this;
log = getLogger();
// load and init configuration
saveDefaultConfig(); // saves default configuration if no config.yml exists yet
FileConfiguration savedConfig = getConfig();
CONF.readConfig(savedConfig);
gcommand = new Commands(this);
CommandExecutor playerCommands = gcommand.new Money();
CommandExecutor moneyAdminCommands = gcommand.new Moneyadmin();
CommandExecutor adminCommands = gcommand.new GringottsCmd();
getCommand("balance").setExecutor(playerCommands);
getCommand("money").setExecutor(playerCommands);
getCommand("moneyadmin").setExecutor(moneyAdminCommands);
getCommand("gringotts").setExecutor(adminCommands);
accounting = new Accounting();
getServer().getPluginManager().registerEvents(new AccountListener(this), this);
registerEconomy();
try {
MetricsLite metrics = new MetricsLite(this);
metrics.start();
} catch (IOException e) {
log.info("Failed to submit PluginMetrics stats");
}
// just call DAO once to ensure it's loaded before startup is complete
DAO.getDao();
log.fine("enabled");
}
@Override
public void onDisable() {
// shut down db connection
DAO.getDao().shutdown();
log.info("disabled");
}
/**
* Register Gringotts as economy provider for vault.
*/
private void registerEconomy() {
if (DEP.vault.exists()) {
final ServicesManager sm = getServer().getServicesManager();
sm.register(Economy.class, new VaultConnector(), this, ServicePriority.Highest);
log.info("Registered Vault interface.");
} else {
log.info("Vault not found. Other plugins may not be able to access Gringotts accounts.");
}
}
}
|
package org.grouplens.inject.graph;
import java.lang.reflect.Type;
import java.util.List;
import javax.inject.Provider;
import com.google.common.base.Function;
/**
* A concrete type. It has a set of dependencies which must be satisfied in
* order to instantiate it. It can also be viewed as an instantiable extension
* of {@link Type}.
*
* <p>
* Nodes are expected to provide a reasonable implementation of
* {@link #equals(Object)} and {@link #hashCode()} so that they can be
* de-duplicated, etc.
*
* @author Michael Ekstrand <ekstrand@cs.umn.edu>
*
*/
public interface Node {
/**
* Get this node's dependencies.
*
* @return A list of dependencies which must be satisfied in order to
* instantiate this node.
*/
List<Desire> getDependencies();
/**
* Get the type of this node.
*
* @return The type of objects to be instantiated by this node.
*/
Type getType();
/**
* Get the type-erased class of this node's type.
* @return The class object for this node's type.
*/
Class<?> getErasedType();
/**
* Create an instance of the type satisfied by this node.
*
* @param dependencies A function mapping desires to providers of their
* instances.
* @return A new instance of the type represented by this node, with the
* specified dependencies injected.
* @review Consider supporting making Node create {@link Provider}s.
*/
Object createInstance(Function<Desire, Provider<?>> dependencies);
}
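/*
 * Usage sketch (illustrative only; "node" and "providerFor" are assumed to
 * exist elsewhere and are not part of this interface):
 *
 *   Function<Desire, Provider<?>> providers = new Function<Desire, Provider<?>>() {
 *       public Provider<?> apply(Desire desire) {
 *           return providerFor(desire); // resolve each declared dependency
 *       }
 *   };
 *   Object instance = node.createInstance(providers);
 */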
|
package org.jboss.logmanager;
import java.lang.ref.WeakReference;
import java.util.Collection;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import static org.jboss.logmanager.ConcurrentReferenceHashMap.ReferenceType.STRONG;
import static org.jboss.logmanager.ConcurrentReferenceHashMap.ReferenceType.WEAK;
/**
* A node in the tree of logger names. Maintains weak references to children and a strong reference to its parent.
*/
final class LoggerNode {
/**
* The log manager.
*/
private final LogContext context;
/**
* The parent node, or {@code null} if this is the root logger node.
*/
private final LoggerNode parent;
/**
* The fully-qualified name of this logger.
*/
private final String fullName;
/**
* A weak reference to the logger instance. Only update using {@link #loggerRefUpdater}.
*/
private volatile LoggerRef loggerRef = null;
/**
* The atomic updater for {@link #loggerRef}.
*/
private static final AtomicReferenceFieldUpdater<LoggerNode, LoggerRef> loggerRefUpdater = AtomicReferenceFieldUpdater.newUpdater(LoggerNode.class, LoggerRef.class, "loggerRef");
/**
* The map of names to child nodes. The child node references are weak.
*/
private final ConcurrentMap<String, LoggerNode> children = new ConcurrentReferenceHashMap<String, LoggerNode>(8, STRONG, WEAK);
/**
* Construct a new root instance.
*
* @param context the logmanager
*/
LoggerNode(final LogContext context) {
parent = null;
fullName = "";
this.context = context;
}
/**
* Construct a child instance.
*
* @param context the logmanager
* @param parent the parent node
* @param nodeName the name of this subnode
*/
private LoggerNode(LogContext context, LoggerNode parent, String nodeName) {
nodeName = nodeName.trim();
if (nodeName.length() == 0) {
throw new IllegalArgumentException("nodeName is empty, or just whitespace");
}
this.parent = parent;
if (parent.parent == null) {
fullName = nodeName;
} else {
fullName = parent.fullName + "." + nodeName;
}
this.context = context;
}
/**
* Get or create a relative logger node. The name is relatively qualified to this node.
*
* @param name the name
* @return the corresponding logger node
*/
LoggerNode getOrCreate(final String name) {
if (name == null || name.length() == 0) {
return this;
} else {
int i = name.indexOf('.');
final String nextName = i == -1 ? name : name.substring(0, i);
LoggerNode nextNode = children.get(nextName);
if (nextNode == null) {
nextNode = new LoggerNode(context, this, nextName);
LoggerNode appearingNode = children.putIfAbsent(nextName, nextNode);
if (appearingNode != null) {
nextNode = appearingNode;
}
}
if (i == -1) {
return nextNode;
} else {
return nextNode.getOrCreate(name.substring(i + 1));
}
}
}
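/*
 * Resolution sketch (behavior of the method above): a dotted name is walked
 * one segment at a time, creating any missing child nodes along the way, e.g.
 *
 *   LoggerNode node = root.getOrCreate("org.example.App");
 *   // equivalent to root.getOrCreate("org").getOrCreate("example").getOrCreate("App")
 */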
/**
* Get a relative logger, if it exists.
*
* @param name the name
* @return the corresponding logger
*/
LoggerNode getIfExists(final String name) {
if (name == null || name.length() == 0) {
return this;
} else {
int i = name.indexOf('.');
final String nextName = i == -1 ? name : name.substring(0, i);
LoggerNode nextNode = children.get(nextName);
if (nextNode == null) {
return null;
}
if (i == -1) {
return nextNode;
} else {
return nextNode.getIfExists(name.substring(i + 1));
}
}
}
/**
* Get or create a logger instance for this node.
*
* @return a logger instance
*/
Logger getOrCreateLogger() {
final String fullName = this.fullName;
final LoggerNode parent = this.parent;
for (;;) {
final LoggerRef loggerRef = this.loggerRef;
if (loggerRef != null) {
final Logger logger = loggerRef.get();
if (logger != null) {
return logger;
}
}
final Logger logger = new Logger(this, fullName);
if (loggerRefUpdater.compareAndSet(this, loggerRef, parent == null ? new StrongLoggerRef(logger) : new WeakLoggerRef(logger))) {
// initialize the effective level
logger.setLevel(null);
return logger;
}
}
}
/**
* Get a logger instance for this node.
*
* @return a logger instance
*/
Logger getLogger() {
final LoggerRef loggerRef = this.loggerRef;
return loggerRef == null ? null : loggerRef.get();
}
/**
* Get the children of this logger.
*
* @return the children
*/
Collection<LoggerNode> getChildren() {
return children.values();
}
/**
* Return the logger instance of the parent logger node, or {@code null} if this is the root logger node.
*
* @return the parent logger instance, or {@code null} for none
*/
Logger getParentLogger() {
LoggerNode node = parent;
while (node != null) {
final Logger instance = node.getLogger();
if (instance != null) {
return instance;
}
node = node.parent;
}
return null;
}
/**
* Get the log context.
*
* @return the log context
*/
LogContext getContext() {
return context;
}
/**
* Recursively update the effective log level of all log instances on all children. The recursion depth will be proportionate to the
* log node nesting depth so stack use should not be an issue. Must only be called while the log context's level
* change lock is held.
*
* @param newLevel the new effective level
*/
void updateChildEffectiveLevel(int newLevel) {
for (LoggerNode node : children.values()) {
if (node != null) {
final Logger instance = node.getLogger();
if (instance != null) {
instance.setEffectiveLevel(newLevel);
}
}
}
}
private interface LoggerRef {
Logger get();
}
private static final class WeakLoggerRef extends WeakReference<Logger> implements LoggerRef {
private WeakLoggerRef(Logger referent) {
super(referent);
}
}
private static final class StrongLoggerRef implements LoggerRef {
private final Logger logger;
private StrongLoggerRef(final Logger logger) {
this.logger = logger;
}
public Logger get() {
return logger;
}
}
}
|
package org.junit.runners;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.runner.Runner;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.model.FrameworkField;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
/**
* The custom runner <code>Parameterized</code> implements parameterized tests.
* When running a parameterized test class, instances are created for the
* cross-product of the test methods and the test data elements.
* <p>
* For example, to test a Fibonacci function, write:
* <pre>
* @RunWith(Parameterized.class)
* public class FibonacciTest {
* @Parameters(name= "{index}: fib[{0}]={1}")
* public static Iterable<Object[]> data() {
* return Arrays.asList(new Object[][] { { 0, 0 }, { 1, 1 }, { 2, 1 },
* { 3, 2 }, { 4, 3 }, { 5, 5 }, { 6, 8 } });
* }
*
* private int fInput;
*
* private int fExpected;
*
* public FibonacciTest(int input, int expected) {
* fInput= input;
* fExpected= expected;
* }
*
* @Test
* public void test() {
* assertEquals(fExpected, Fibonacci.compute(fInput));
* }
* }
* </pre>
* <p>
* Each instance of <code>FibonacciTest</code> will be constructed using the
* two-argument constructor and the data values in the
* <code>@Parameters</code> method.
* <p>
* In order that you can easily identify the individual tests, you may provide a
* name for the <code>@Parameters</code> annotation. This name is allowed
* to contain placeholders, which are replaced at runtime. The placeholders are
* <dl>
* <dt>{index}</dt>
* <dd>the current parameter index</dd>
* <dt>{0}</dt>
* <dd>the first parameter value</dd>
* <dt>{1}</dt>
* <dd>the second parameter value</dd>
* <dt>...</dt>
* <dd></dd>
* </dl>
* <p>
* In the example given above, the <code>Parameterized</code> runner creates
* names like <code>[1: fib(3)=2]</code>. If you don't use the name parameter,
* then the current parameter index is used as name.
* <p>
* You can also write:
* <pre>
* @RunWith(Parameterized.class)
* public class FibonacciTest {
* @Parameters
* public static Iterable<Object[]> data() {
* return Arrays.asList(new Object[][] { { 0, 0 }, { 1, 1 }, { 2, 1 },
* { 3, 2 }, { 4, 3 }, { 5, 5 }, { 6, 8 } });
* }
*
* @Parameter(0)
* public int fInput;
*
* @Parameter(1)
* public int fExpected;
*
* @Test
* public void test() {
* assertEquals(fExpected, Fibonacci.compute(fInput));
* }
* }
* </pre>
* <p>
* Each instance of <code>FibonacciTest</code> will be constructed with the default constructor
* and fields annotated by <code>@Parameter</code> will be initialized
* with the data values in the <code>@Parameters</code> method.
*
* <p>
* The parameters can be provided as an array, too:
*
* <pre>
* @Parameters
* public static Object[][] data() {
* return new Object[][] { { 0, 0 }, { 1, 1 }, { 2, 1 }, { 3, 2 }, { 4, 3 },
* { 5, 5 }, { 6, 8 } };
* }
* </pre>
*
* <h3>Tests with single parameter</h3>
* <p>
* If your test needs a single parameter only, you don't have to wrap it with an
* array. Instead you can provide an <code>Iterable</code> or an array of
* objects.
* <pre>
* @Parameters
* public static Iterable<? extends Object> data() {
* return Arrays.asList("first test", "second test");
* }
* </pre>
* <p>
* or
* <pre>
* @Parameters
* public static Object[] data() {
* return new Object[] { "first test", "second test" };
* }
* </pre>
*
* @since 4.0
*/
public class Parameterized extends Suite {
/**
* Annotation for a method which provides parameters to be injected into the
* test class constructor by <code>Parameterized</code>. The method has to
* be public and static.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public static @interface Parameters {
/**
* Optional pattern to derive the test's name from the parameters. Use
* numbers in braces to refer to the parameters or the additional data
* as follows:
* <pre>
* {index} - the current parameter index
* {0} - the first parameter value
* {1} - the second parameter value
* etc...
* </pre>
* <p>
* Default value is "{index}" for compatibility with previous JUnit
* versions.
*
* @return {@link MessageFormat} pattern string, except the index
* placeholder.
* @see MessageFormat
*/
String name() default "{index}";
}
/**
* Annotation for fields of the test class which will be initialized by the
* method annotated by <code>Parameters</code><br/>
 * When this annotation is used directly, the test class constructor isn't needed.<br/>
* Index range must start at 0.
* Default value is 0.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public static @interface Parameter {
/**
* Method that returns the index of the parameter in the array
* returned by the method annotated by <code>Parameters</code>.<br/>
* Index range must start at 0.
* Default value is 0.
*
* @return the index of the parameter.
*/
int value() default 0;
}
protected class TestClassRunnerForParameters extends BlockJUnit4ClassRunner {
private final Object[] fParameters;
private final String fName;
protected TestClassRunnerForParameters(Class<?> type, String pattern, int index, Object[] parameters) throws InitializationError {
super(type);
fParameters = parameters;
fName = nameFor(pattern, index, parameters);
}
@Override
public Object createTest() throws Exception {
if (fieldsAreAnnotated()) {
return createTestUsingFieldInjection();
} else {
return createTestUsingConstructorInjection();
}
}
private Object createTestUsingConstructorInjection() throws Exception {
return getTestClass().getOnlyConstructor().newInstance(fParameters);
}
private Object createTestUsingFieldInjection() throws Exception {
List<FrameworkField> annotatedFieldsByParameter = getAnnotatedFieldsByParameter();
if (annotatedFieldsByParameter.size() != fParameters.length) {
throw new Exception("Wrong number of parameters and @Parameter fields." +
" @Parameter fields counted: " + annotatedFieldsByParameter.size() + ", available parameters: " + fParameters.length + ".");
}
Object testClassInstance = getTestClass().getJavaClass().newInstance();
for (FrameworkField each : annotatedFieldsByParameter) {
Field field = each.getField();
Parameter annotation = field.getAnnotation(Parameter.class);
int index = annotation.value();
try {
field.set(testClassInstance, fParameters[index]);
} catch (IllegalArgumentException iare) {
throw new Exception(getTestClass().getName() + ": Trying to set " + field.getName() +
" with the value " + fParameters[index] +
" that is not the right type (" + fParameters[index].getClass().getSimpleName() + " instead of " +
field.getType().getSimpleName() + ").", iare);
}
}
return testClassInstance;
}
protected String nameFor(String pattern, int index, Object[] parameters) {
String finalPattern = pattern.replaceAll("\\{index\\}", Integer.toString(index));
String name = MessageFormat.format(finalPattern, parameters);
return "[" + name + "]";
}
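/*
 * Example (values follow the Fibonacci sample in the class javadoc):
 *
 *   nameFor("{index}: fib({0})={1}", 1, new Object[] { 3, 2 })
 *   // "{index}" is replaced first, then MessageFormat fills {0} and {1},
 *   // yielding "[1: fib(3)=2]"
 */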
@Override
protected String getName() {
return fName;
}
@Override
protected String testName(FrameworkMethod method) {
return method.getName() + getName();
}
@Override
protected void validateConstructor(List<Throwable> errors) {
validateOnlyOneConstructor(errors);
if (fieldsAreAnnotated()) {
validateZeroArgConstructor(errors);
}
}
@Override
protected void validateFields(List<Throwable> errors) {
super.validateFields(errors);
if (fieldsAreAnnotated()) {
List<FrameworkField> annotatedFieldsByParameter = getAnnotatedFieldsByParameter();
int[] usedIndices = new int[annotatedFieldsByParameter.size()];
for (FrameworkField each : annotatedFieldsByParameter) {
int index = each.getField().getAnnotation(Parameter.class).value();
if (index < 0 || index > annotatedFieldsByParameter.size() - 1) {
errors.add(
new Exception("Invalid @Parameter value: " + index + ". @Parameter fields counted: " +
annotatedFieldsByParameter.size() + ". Please use an index between 0 and " +
(annotatedFieldsByParameter.size() - 1) + ".")
);
} else {
usedIndices[index]++;
}
}
for (int index = 0; index < usedIndices.length; index++) {
int numberOfUse = usedIndices[index];
if (numberOfUse == 0) {
errors.add(new Exception("@Parameter(" + index + ") is never used."));
} else if (numberOfUse > 1) {
errors.add(new Exception("@Parameter(" + index + ") is used more than once (" + numberOfUse + ")."));
}
}
}
}
@Override
protected Statement classBlock(RunNotifier notifier) {
return childrenInvoker(notifier);
}
@Override
protected Annotation[] getRunnerAnnotations() {
return new Annotation[0];
}
}
private static final List<Runner> NO_RUNNERS = Collections.<Runner>emptyList();
private final List<Runner> fRunners;
/**
* Only called reflectively. Do not use programmatically.
*/
public Parameterized(Class<?> klass) throws Throwable {
super(klass, NO_RUNNERS);
Parameters parameters = getParametersMethod().getAnnotation(
Parameters.class);
fRunners = Collections.unmodifiableList(createRunnersForParameters(allParameters(), parameters.name()));
}
@Override
protected List<Runner> getChildren() {
return fRunners;
}
private Runner createRunnerWithNotNormalizedParameters(String pattern,
int index, Object parametersOrSingleParameter)
throws InitializationError {
Object[] parameters= (parametersOrSingleParameter instanceof Object[]) ? (Object[]) parametersOrSingleParameter
: new Object[] { parametersOrSingleParameter };
return createRunner(pattern, index, parameters);
}
protected Runner createRunner(String pattern, int index, Object[] parameters) throws InitializationError {
return new TestClassRunnerForParameters(getTestClass().getJavaClass(), pattern, index, parameters);
}
@SuppressWarnings("unchecked")
private Iterable<Object> allParameters() throws Throwable {
Object parameters = getParametersMethod().invokeExplosively(null);
if (parameters instanceof Iterable) {
return (Iterable<Object>) parameters;
} else if (parameters instanceof Object[]) {
return Arrays.asList((Object[]) parameters);
} else {
throw parametersMethodReturnedWrongType();
}
}
private FrameworkMethod getParametersMethod() throws Exception {
List<FrameworkMethod> methods = getTestClass().getAnnotatedMethods(
Parameters.class);
for (FrameworkMethod each : methods) {
if (each.isStatic() && each.isPublic()) {
return each;
}
}
throw new Exception("No public static parameters method on class "
+ getTestClass().getName());
}
private List<Runner> createRunnersForParameters(Iterable<Object> allParameters, String namePattern) throws Exception {
try {
int i = 0;
List<Runner> children = new ArrayList<Runner>();
for (Object parametersOfSingleTest : allParameters) {
children.add(createRunnerWithNotNormalizedParameters(
namePattern, i++, parametersOfSingleTest));
}
return children;
} catch (ClassCastException e) {
throw parametersMethodReturnedWrongType();
}
}
private Exception parametersMethodReturnedWrongType() throws Exception {
String className = getTestClass().getName();
String methodName = getParametersMethod().getName();
String message = MessageFormat.format(
"{0}.{1}() must return an Iterable of arrays.",
className, methodName);
return new Exception(message);
}
private List<FrameworkField> getAnnotatedFieldsByParameter() {
return getTestClass().getAnnotatedFields(Parameter.class);
}
private boolean fieldsAreAnnotated() {
return !getAnnotatedFieldsByParameter().isEmpty();
}
}
|
package org.lantern;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.TimerTask;
import org.lantern.event.Events;
import org.lantern.state.Model;
import org.lantern.state.SyncPath;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
public class ConnectivityChecker extends TimerTask {
private static Logger LOG = LoggerFactory
.getLogger(ConnectivityChecker.class);
private static final List<String> TEST_SITES = Arrays.asList(
"mail.yahoo.com",
"www.microsoft.com",
"blogfa.com",
"www.baidu.com"
);
private static final int TEST_SOCKET_TIMEOUT_MILLIS = 30000;
private final Model model;
@Inject
ConnectivityChecker(final Model model) {
this.model = model;
}
public void connect() throws ConnectException {
if (!checkConnectivity()) {
throw new ConnectException("Could not connect");
}
}
@Override
public void run() {
checkConnectivity();
}
public boolean checkConnectivity() {
final boolean wasConnected =
Boolean.TRUE.equals(model.getConnectivity().isInternet());
final boolean connected = areAnyTestSitesReachable();
this.model.getConnectivity().setInternet(connected);
boolean becameConnected = connected && !wasConnected;
boolean becameDisconnected = !connected && wasConnected;
if (becameConnected) {
LOG.info("Became connected");
notifyConnected();
} else if (becameDisconnected) {
LOG.info("Became disconnected");
notifyDisconnected();
}
Events.sync(SyncPath.CONNECTIVITY, model.getConnectivity());
return connected;
}
private void notifyConnected() {
LOG.info("Became connected...");
notifyListeners(true);
}
private void notifyDisconnected() {
LOG.info("Became disconnected...");
notifyListeners(false);
}
private void notifyListeners(final boolean connected) {
ConnectivityChangedEvent event = new ConnectivityChangedEvent(connected);
Events.asyncEventBus().post(event);
}
private static boolean areAnyTestSitesReachable() {
Collections.shuffle(TEST_SITES);
for (String site : TEST_SITES) {
if (isReachable(site)) {
return true;
}
}
LOG.info("None of the test sites were reachable -- no internet connection");
return false;
}
private static boolean isReachable(String site) {
Socket socket = null;
try {
socket = new Socket();
LOG.debug("Testing site: {}", site);
socket.connect(new InetSocketAddress(site, 80),
TEST_SOCKET_TIMEOUT_MILLIS);
return true;
} catch (Exception e) {
LOG.debug("Could not connect to "+site, e);
// Ignore
return false;
} finally {
if (socket != null) {
try {
socket.close();
} catch (Exception e) {
LOG.debug("Unable to close connectivity test socket: {}",
e.getMessage(), e);
}
}
}
}
}
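/*
 * Scheduling sketch (the real wiring is done elsewhere via Guice injection;
 * the timer name and the one-minute period below are assumptions):
 *
 *   ConnectivityChecker checker = new ConnectivityChecker(model);
 *   new java.util.Timer("connectivity-check", true).schedule(checker, 0L, 60000L);
 */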
|
package org.lightmare.deploy;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import javax.annotation.Resource;
import javax.ejb.EJB;
import javax.ejb.Local;
import javax.ejb.Remote;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.ejb.TransactionManagement;
import javax.ejb.TransactionManagementType;
import javax.interceptor.AroundInvoke;
import javax.interceptor.Interceptors;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceUnit;
import org.apache.log4j.Logger;
import org.lightmare.cache.ConnectionData;
import org.lightmare.cache.ConnectionSemaphore;
import org.lightmare.cache.DeployData;
import org.lightmare.cache.InjectionData;
import org.lightmare.cache.InterceptorData;
import org.lightmare.cache.MetaContainer;
import org.lightmare.cache.MetaData;
import org.lightmare.config.Configuration;
import org.lightmare.ejb.exceptions.BeanInUseException;
import org.lightmare.jpa.JPAManager;
import org.lightmare.jpa.datasource.DataSourceInitializer;
import org.lightmare.libraries.LibraryLoader;
import org.lightmare.rest.utils.RestCheck;
import org.lightmare.rest.utils.RestUtils;
import org.lightmare.utils.NamingUtils;
import org.lightmare.utils.ObjectUtils;
import org.lightmare.utils.beans.BeanUtils;
import org.lightmare.utils.fs.FileUtils;
import org.lightmare.utils.fs.WatchUtils;
import org.lightmare.utils.reflect.MetaUtils;
/**
 * Class for running in a distinct thread to initialize
 * {@link javax.sql.DataSource}s, load libraries, deploy and cache
 * {@link javax.ejb.Stateless} session beans, and clean resources after deployments
*
* @author levan
*
*/
public class BeanLoader {
private static final int LOADER_POOL_SIZE = 5;
private static final String LOADER_THREAD_NAME = "Ejb-Loader-Thread-%s";
private static final Logger LOG = Logger.getLogger(BeanLoader.class);
// Thread pool for deploying and removal of beans and temporal resources
private static final ExecutorService LOADER_POOL = Executors
.newFixedThreadPool(LOADER_POOL_SIZE, new ThreadFactory() {
@Override
public Thread newThread(Runnable runnable) {
Thread thread = new Thread(runnable);
thread.setName(String.format(LOADER_THREAD_NAME,
thread.getId()));
thread.setPriority(Thread.MAX_PRIORITY);
ClassLoader parent = getCurrent();
thread.setContextClassLoader(parent);
return thread;
}
});
/**
 * {@link PrivilegedAction} implementation which wraps the passed
 * {@link Callable} instance via
 * {@link Executors#privilegedCallable(Callable)}
*
* @author levan
*
* @param <T>
*/
private static class ContextLoaderAction<T> implements
PrivilegedAction<Callable<T>> {
private final Callable<T> current;
public ContextLoaderAction(Callable<T> current) {
this.current = current;
}
@Override
public Callable<T> run() {
Callable<T> privileged = Executors.privilegedCallable(current);
return privileged;
}
}
/**
 * {@link Callable} implementation for initializing and deploying
* {@link javax.sql.DataSource}
*
* @author levan
*
*/
private static class ConnectionDeployer implements Callable<Boolean> {
private DataSourceInitializer initializer;
private Properties properties;
private CountDownLatch dsLatch;
private boolean countedDown;
public ConnectionDeployer(DataSourceParameters parameters) {
this.initializer = parameters.initializer;
this.properties = parameters.properties;
this.dsLatch = parameters.dsLatch;
}
private void notifyDs() {
if (ObjectUtils.notTrue(countedDown)) {
dsLatch.countDown();
countedDown = Boolean.TRUE;
}
}
@Override
public Boolean call() throws Exception {
boolean result;
ClassLoader loader = getCurrent();
try {
initializer.registerDataSource(properties);
result = Boolean.TRUE;
} catch (IOException ex) {
result = Boolean.FALSE;
LOG.error("Could not initialize datasource", ex);
} finally {
notifyDs();
LibraryLoader.loadCurrentLibraries(loader);
}
return result;
}
}
/**
 * {@link Callable} implementation for temporary resources removal
*
* @author levan
*
*/
private static class ResourceCleaner implements Callable<Boolean> {
List<File> tmpFiles;
public ResourceCleaner(List<File> tmpFiles) {
this.tmpFiles = tmpFiles;
}
/**
 * Removes temporary resources after the deploy {@link Thread} notifies
*
* @throws InterruptedException
*/
private void clearTmpData() throws InterruptedException {
synchronized (tmpFiles) {
tmpFiles.wait();
}
for (File tmpFile : tmpFiles) {
FileUtils.deleteFile(tmpFile);
LOG.info(String.format("Cleaning temporal resource %s done",
tmpFile.getName()));
}
}
@Override
public Boolean call() throws Exception {
boolean result;
ClassLoader loader = getCurrent();
try {
clearTmpData();
result = Boolean.TRUE;
} catch (InterruptedException ex) {
result = Boolean.FALSE;
LOG.error("Coluld not clean temporary resources", ex);
} finally {
LibraryLoader.loadCurrentLibraries(loader);
}
return result;
}
}
/**
* {@link Callable} implementation for deploying {@link javax.ejb.Stateless}
 * session beans and caching {@link MetaData} keyed by bean name
*
* @author levan
*
*/
private static class BeanDeployer implements Callable<String> {
private MetaCreator creator;
private String beanName;
private String className;
private ClassLoader loader;
private List<File> tmpFiles;
private MetaData metaData;
private CountDownLatch blocker;
private boolean released;
private List<Field> unitFields;
private DeployData deployData;
private boolean checkWatch;
private Configuration configuration;
public BeanDeployer(BeanParameters parameters) {
this.creator = parameters.creator;
this.beanName = parameters.beanName;
this.className = parameters.className;
this.loader = parameters.loader;
this.tmpFiles = parameters.tmpFiles;
this.metaData = parameters.metaData;
this.blocker = parameters.blocker;
this.deployData = parameters.deployData;
this.configuration = parameters.configuration;
}
/**
* Locks {@link ConnectionSemaphore} if needed for connection processing
*
* @param semaphore
* @param unitName
* @param jndiName
* @throws IOException
*/
private void lockSemaphore(ConnectionSemaphore semaphore,
String unitName, String jndiName) throws IOException {
synchronized (semaphore) {
if (ObjectUtils.notTrue(semaphore.isCheck())) {
try {
creator.configureConnection(unitName, beanName, loader,
configuration);
} finally {
semaphore.notifyAll();
}
}
}
}
/**
 * Counts down the {@link CountDownLatch} blocker if it has not already been
 * released by this deployer
*/
private void releaseBlocker() {
if (ObjectUtils.notTrue(released)) {
blocker.countDown();
released = Boolean.TRUE;
}
}
/**
 * Checks if bean {@link MetaData} with the same name is already cached; if it
 * is, releases the {@link CountDownLatch} blocker and throws
 * {@link BeanInUseException}, otherwise caches the meta data under the associated name
*
* @param beanEjbName
* @throws BeanInUseException
*/
private void checkAndSetBean(String beanEjbName)
throws BeanInUseException {
try {
MetaContainer.checkAndAddMetaData(beanEjbName, metaData);
} catch (BeanInUseException ex) {
releaseBlocker();
throw ex;
}
}
private void addUnitField(Field unitField) {
if (unitFields == null) {
unitFields = new ArrayList<Field>();
}
unitFields.add(unitField);
}
/**
 * Checks whether a connection with the passed unit or jndi name already
* exists
*
* @param unitName
* @param jndiName
* @return <code>boolean</code>
*/
private boolean checkOnEmf(String unitName, String jndiName) {
boolean checkForEmf;
if (jndiName == null || jndiName.isEmpty()) {
checkForEmf = JPAManager.checkForEmf(unitName);
} else {
jndiName = NamingUtils.createJpaJndiName(jndiName);
checkForEmf = JPAManager.checkForEmf(unitName)
&& JPAManager.checkForEmf(jndiName);
}
return checkForEmf;
}
/**
 * Creates a {@link ConnectionSemaphore} if one does not exist and caches the
 * connection data in the bean {@link MetaData}
 *
 * @param context
 * @param connectionField
* @throws IOException
*/
private void identifyConnections(PersistenceContext context,
Field connectionField) throws IOException {
ConnectionData connection = new ConnectionData();
connection.setConnectionField(connectionField);
String unitName = context.unitName();
String jndiName = context.name();
connection.setUnitName(unitName);
connection.setJndiName(jndiName);
boolean checkForEmf = checkOnEmf(unitName, jndiName);
ConnectionSemaphore semaphore;
if (checkForEmf) {
releaseBlocker();
semaphore = JPAManager.getSemaphore(unitName);
connection.setConnection(semaphore);
} else {
// Sets connection semaphore for this connection
semaphore = JPAManager.setSemaphore(unitName, jndiName);
connection.setConnection(semaphore);
releaseBlocker();
if (ObjectUtils.notNull(semaphore)) {
lockSemaphore(semaphore, unitName, jndiName);
}
}
metaData.addConnection(connection);
}
/**
* Caches {@link EJB} annotated fields
*
 * @param field
*/
private void cacheInjectFields(Field field) {
EJB ejb = field.getAnnotation(EJB.class);
Class<?> interfaceClass = ejb.beanInterface();
if (interfaceClass == null || interfaceClass.equals(Object.class)) {
interfaceClass = field.getType();
}
String name = ejb.beanName();
if (name == null || name.isEmpty()) {
name = BeanUtils.nameFromInterface(interfaceClass);
}
String description = ejb.description();
String mappedName = ejb.mappedName();
Class<?>[] interfaceClasses = { interfaceClass };
InjectionData injectionData = new InjectionData();
injectionData.setField(field);
injectionData.setInterfaceClasses(interfaceClasses);
injectionData.setName(name);
injectionData.setDescription(description);
injectionData.setMappedName(mappedName);
metaData.addInject(injectionData);
}
/**
* Finds and caches {@link PersistenceContext}, {@link PersistenceUnit}
* and {@link Resource} annotated {@link Field}s in bean class and
 * configures connections and creates {@link ConnectionSemaphore}s if one
 * does not exist for the {@link PersistenceContext#unitName()} object
*
* @throws IOException
*/
private void retrieveConnections() throws IOException {
Class<?> beanClass = metaData.getBeanClass();
Field[] fields = beanClass.getDeclaredFields();
PersistenceUnit unit;
PersistenceContext context;
Resource resource;
EJB ejbAnnot;
if (fields == null || fields.length == 0) {
releaseBlocker();
}
for (Field field : fields) {
context = field.getAnnotation(PersistenceContext.class);
resource = field.getAnnotation(Resource.class);
unit = field.getAnnotation(PersistenceUnit.class);
ejbAnnot = field.getAnnotation(EJB.class);
if (ObjectUtils.notNull(context)) {
identifyConnections(context, field);
} else if (ObjectUtils.notNull(resource)) {
metaData.setTransactionField(field);
} else if (ObjectUtils.notNull(unit)) {
addUnitField(field);
} else if (ObjectUtils.notNull(ejbAnnot)) {
// caches EJB annotated fields
cacheInjectFields(field);
}
}
if (ObjectUtils.available(unitFields)) {
metaData.addUnitFields(unitFields);
}
}
/**
* Creates {@link MetaData} for bean class
*
* @param beanClass
 * @throws IOException
*/
private void createMeta(Class<?> beanClass) throws IOException {
metaData.setBeanClass(beanClass);
if (Configuration.isServer()) {
retrieveConnections();
} else {
releaseBlocker();
}
metaData.setLoader(loader);
}
/**
* Checks if bean class is annotated as {@link TransactionAttribute} and
* {@link TransactionManagement} and caches
* {@link TransactionAttribute#value()} and
* {@link TransactionManagement#value()} in {@link MetaData} object
*
* @param beanClass
*/
private void checkOnTransactional(Class<?> beanClass) {
TransactionAttribute transactionAttribute = beanClass
.getAnnotation(TransactionAttribute.class);
TransactionManagement transactionManagement = beanClass
.getAnnotation(TransactionManagement.class);
boolean transactional = Boolean.FALSE;
TransactionAttributeType transactionAttrType;
TransactionManagementType transactionManType;
if (transactionAttribute == null) {
transactional = Boolean.TRUE;
transactionAttrType = TransactionAttributeType.REQUIRED;
transactionManType = TransactionManagementType.CONTAINER;
} else if (transactionManagement == null) {
transactionAttrType = transactionAttribute.value();
transactionManType = TransactionManagementType.CONTAINER;
} else {
transactionAttrType = transactionAttribute.value();
transactionManType = transactionManagement.value();
}
metaData.setTransactional(transactional);
metaData.setTransactionAttrType(transactionAttrType);
metaData.setTransactionManType(transactionManType);
}
/**
* Caches {@link Interceptors} annotation defined data
*
* @param beanClass
* @param interceptorClasses
* @throws IOException
*/
private void cacheInterceptors(Class<?> beanClass,
Class<?>[] interceptorClasses, Method beanMethod)
throws IOException {
int length = interceptorClasses.length;
Class<?> interceptorClass;
List<Method> interceptorMethods;
Method interceptorMethod;
for (int i = 0; i < length; i++) {
interceptorClass = interceptorClasses[i];
interceptorMethods = MetaUtils.getAnnotatedMethods(beanClass,
AroundInvoke.class);
interceptorMethod = ObjectUtils.getFirst(interceptorMethods);
InterceptorData data = new InterceptorData();
data.setBeanClass(beanClass);
data.setBeanMethod(beanMethod);
data.setInterceptorClass(interceptorClass);
data.setInterceptorMethod(interceptorMethod);
metaData.addInterceptor(data);
}
}
private void cacheInterceptors(Interceptors interceptors,
Class<?> beanClass, Method... beanMethods) throws IOException {
Class<?>[] interceptorClasses = interceptors.value();
if (ObjectUtils.available(interceptorClasses)) {
Method beanMethod = ObjectUtils.getFirst(beanMethods);
cacheInterceptors(beanClass, interceptorClasses, beanMethod);
}
}
/**
* Identifies and caches {@link Interceptors} annotation data
*
* @throws IOException
*/
private void identifyInterceptors(Class<?> beanClass)
throws IOException {
Interceptors interceptors = beanClass
.getAnnotation(Interceptors.class);
if (ObjectUtils.notNull(interceptors)) {
cacheInterceptors(interceptors, beanClass);
}
List<Method> beanMethods = MetaUtils.getAnnotatedMethods(beanClass,
Interceptors.class);
if (ObjectUtils.available(beanMethods)) {
for (Method beanMethod : beanMethods) {
interceptors = beanMethod.getAnnotation(Interceptors.class);
cacheInterceptors(interceptors, beanClass, beanMethod);
}
}
}
/**
* Identifies bean interfaces
*
* @param beanClass
*/
private void identifyInterfaces(Class<?> beanClass) {
Class<?>[] remoteInterface = null;
Class<?>[] localInterface = null;
Class<?>[] interfaces;
List<Class<?>> interfacesList;
Remote remote = beanClass.getAnnotation(Remote.class);
Local local = beanClass.getAnnotation(Local.class);
interfaces = beanClass.getInterfaces();
if (ObjectUtils.notNull(remote)) {
remoteInterface = remote.value();
}
interfacesList = new ArrayList<Class<?>>();
for (Class<?> interfaceClass : interfaces) {
if (interfaceClass.isAnnotationPresent(Remote.class))
interfacesList.add(interfaceClass);
}
if (ObjectUtils.available(interfacesList)) {
remoteInterface = interfacesList
.toArray(new Class<?>[interfacesList.size()]);
}
if (ObjectUtils.notNull(local)) {
localInterface = local.value();
}
interfacesList = new ArrayList<Class<?>>();
for (Class<?> interfaceClass : interfaces) {
if (interfaceClass.isAnnotationPresent(Local.class))
interfacesList.add(interfaceClass);
}
if (ObjectUtils.available(interfacesList)) {
localInterface = interfacesList
.toArray(new Class<?>[interfacesList.size()]);
}
if (ObjectUtils.notAvailable(localInterface)
&& ObjectUtils.notAvailable(remoteInterface)) {
localInterface = interfaces;
}
metaData.setLocalInterfaces(localInterface);
metaData.setRemoteInterfaces(remoteInterface);
}
/**
* Loads and caches bean {@link Class} by name
*
 * @return bean name under which the {@link MetaData} is cached
* @throws IOException
*/
private String createBeanClass() throws IOException {
try {
Class<?> beanClass = MetaUtils.classForName(className,
Boolean.FALSE, loader);
checkOnTransactional(beanClass);
String beanEjbName = BeanUtils.beanName(beanClass);
checkAndSetBean(beanEjbName);
if (RestCheck.check(beanClass)) {
RestUtils.add(beanClass);
}
createMeta(beanClass);
identifyInterfaces(beanClass);
identifyInterceptors(beanClass);
metaData.setInProgress(Boolean.FALSE);
return beanEjbName;
} catch (IOException ex) {
releaseBlocker();
throw ex;
}
}
private String deployFile() {
String deployed = beanName;
ClassLoader currentLoader = getCurrent();
try {
LibraryLoader.loadCurrentLibraries(loader);
deployed = createBeanClass();
checkWatch = WatchUtils.checkForWatch(deployData);
if (checkWatch) {
URL url = deployData.getUrl();
url = WatchUtils.clearURL(url);
MetaContainer.addBeanName(url, deployed);
}
LOG.info(String.format("bean %s deployed", beanName));
} catch (IOException ex) {
LOG.error(String.format("Could not deploy bean %s cause %s",
beanName, ex.getMessage()), ex);
} finally {
LibraryLoader.loadCurrentLibraries(currentLoader);
}
return deployed;
}
private String deployExtracted() {
String deployed;
synchronized (tmpFiles) {
try {
deployed = deployFile();
} finally {
tmpFiles.notifyAll();
}
}
return deployed;
}
private String deploy() {
synchronized (metaData) {
String deployed;
try {
if (ObjectUtils.notNull(tmpFiles)) {
deployed = deployExtracted();
} else {
deployed = deployFile();
}
} catch (Exception ex) {
LOG.error(ex.getMessage(), ex);
deployed = null;
} finally {
releaseBlocker();
metaData.notifyAll();
}
return deployed;
}
}
@Override
public String call() throws Exception {
String deployed = deploy();
return deployed;
}
}
/**
* Contains parameters for bean deploy classes
*
* @author levan
*
*/
public static class BeanParameters {
public MetaCreator creator;
public String className;
public String beanName;
public ClassLoader loader;
public List<File> tmpFiles;
public CountDownLatch blocker;
public MetaData metaData;
public DeployData deployData;
public boolean server;
public Configuration configuration;
}
/**
* Contains parameters for data source deploy classes
*
* @author levan
*
*/
public static class DataSourceParameters {
public DataSourceInitializer initializer;
public Properties properties;
public Properties poolProperties;
public String poolPath;
public CountDownLatch dsLatch;
}
private static ClassLoader getCurrent() {
ClassLoader current;
MetaCreator creator = MetaContainer.getCreator();
ClassLoader creatorLoader;
if (ObjectUtils.notNull(creator)) {
creatorLoader = creator.getCurrent();
if (ObjectUtils.notNull(creatorLoader)) {
current = creatorLoader;
} else {
current = LibraryLoader.getContextClassLoader();
}
} else {
current = LibraryLoader.getContextClassLoader();
}
return current;
}
/**
* Creates and starts bean deployment process
*
 * @param parameters
* @return {@link Future}
* @throws IOException
*/
public static Future<String> loadBean(BeanParameters parameters)
throws IOException {
parameters.metaData = new MetaData();
String beanName = BeanUtils.parseName(parameters.className);
parameters.beanName = beanName;
BeanDeployer beanDeployer = new BeanDeployer(parameters);
Future<String> future = LOADER_POOL.submit(beanDeployer);
return future;
}
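/*
 * Usage sketch (illustrative only; the class name and the objects assigned to
 * the fields are assumptions, the real values come from the deployment
 * scanner):
 *
 *   BeanParameters params = new BeanParameters();
 *   params.creator = creator;
 *   params.className = "com.example.OrderBean";
 *   params.loader = loader;
 *   params.blocker = new CountDownLatch(1);
 *   params.deployData = deployData;
 *   params.configuration = configuration;
 *   Future<String> beanName = loadBean(params);
 */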
/**
 * Initializes {@link javax.sql.DataSource}s in parallel mode
 *
 * @param parameters
 * @throws IOException
*/
public static void initializeDatasource(DataSourceParameters parameters)
throws IOException {
final ConnectionDeployer connectionDeployer = new ConnectionDeployer(
parameters);
Callable<Boolean> privileged = AccessController
.doPrivileged(new ContextLoaderAction<Boolean>(
connectionDeployer));
LOADER_POOL.submit(privileged);
}
/**
* Creates and starts temporal resources removal process
*
* @param tmpFiles
*/
public static <V> void removeResources(List<File> tmpFiles) {
ResourceCleaner cleaner = new ResourceCleaner(tmpFiles);
Callable<Boolean> privileged = AccessController
.doPrivileged(new ContextLoaderAction<Boolean>(cleaner));
LOADER_POOL.submit(privileged);
}
}
|
package org.lightmare.deploy.fs;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.file.FileSystem;
import java.nio.file.FileSystems;
import java.nio.file.Path;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchEvent.Kind;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.log4j.Logger;
import org.lightmare.cache.ConnectionContainer;
import org.lightmare.cache.DeploymentDirectory;
import org.lightmare.cache.MetaContainer;
import org.lightmare.cache.RestContainer;
import org.lightmare.config.Configuration;
import org.lightmare.jpa.datasource.FileParsers;
import org.lightmare.jpa.datasource.Initializer;
import org.lightmare.rest.providers.RestProvider;
import org.lightmare.utils.CollectionUtils;
import org.lightmare.utils.LogUtils;
import org.lightmare.utils.ObjectUtils;
import org.lightmare.utils.concurrent.ThreadFactoryUtil;
import org.lightmare.utils.fs.WatchUtils;
/**
 * Deployment manager providing {@link Watcher#deployFile(URL)},
 * {@link Watcher#undeployFile(URL)}, {@link Watcher#listDeployments()} and a
 * {@link File} modification event handler for deployments if the Java version
 * is 1.7 or above
*
* @author levan
* @since 0.0.45-SNAPSHOT
*/
public class Watcher implements Runnable {
// Name of deployment watch service thread
private static final String DEPLOY_THREAD_NAME = "watch_thread";
// Priority of deployment watch service thread
private static final int DEPLOY_POOL_PRIORITY = Thread.MAX_PRIORITY - 5;
// Sleep time of thread between watch service status scans
private static final long SLEEP_TIME = 5500L;
// Thread pool for watch service threads
private static final ExecutorService DEPLOY_POOL = Executors
.newSingleThreadExecutor(new ThreadFactoryUtil(DEPLOY_THREAD_NAME,
DEPLOY_POOL_PRIORITY));
private Set<DeploymentDirectory> deployments;
private Set<String> dataSources;
private static final Logger LOG = Logger.getLogger(Watcher.class);
/**
* Defines file types for watch service
*
* @author Levan
* @since 0.0.45-SNAPSHOT
*/
private static enum WatchFileType {
DATA_SOURCE, DEPLOYMENT, NONE;
}
/**
* To filter only deployed sub files from directory
*
* @author levan
* @since 0.0.45-SNAPSHOT
*/
private static class DeployFilter implements FileFilter {
@Override
public boolean accept(File file) {
boolean accept;
try {
URL url = file.toURI().toURL();
url = WatchUtils.clearURL(url);
accept = MetaContainer.chackDeployment(url);
} catch (MalformedURLException ex) {
LOG.error(ex.getMessage(), ex);
accept = false;
} catch (IOException ex) {
LOG.error(ex.getMessage(), ex);
accept = false;
}
return accept;
}
}
private Watcher() {
deployments = getDeployDirectories();
dataSources = getDataSourcePaths();
}
/**
* Clears and gets file {@link URL} by file name
*
* @param fileName
* @return {@link URL}
* @throws IOException
*/
private static URL getAppropriateURL(String fileName) throws IOException {
File file = new File(fileName);
URL url = file.toURI().toURL();
url = WatchUtils.clearURL(url);
return url;
}
/**
* Gets {@link Set} of {@link DeploymentDirectory} instances from
* configuration
*
* @return {@link Set}<code><DeploymentDirectory></code>
*/
private static Set<DeploymentDirectory> getDeployDirectories() {
Collection<Configuration> configs = MetaContainer.CONFIGS.values();
Set<DeploymentDirectory> deploymentDirs = new HashSet<DeploymentDirectory>();
Set<DeploymentDirectory> currentDirs;
for (Configuration config : configs) {
currentDirs = config.getDeploymentPath();
if (config.isWatchStatus()
&& CollectionUtils.valid(currentDirs)) {
deploymentDirs.addAll(currentDirs);
}
}
return deploymentDirs;
}
/**
* Gets {@link Set} of data source paths from configuration
*
* @return {@link Set}<code><String></code>
*/
private static Set<String> getDataSourcePaths() {
Collection<Configuration> configs = MetaContainer.CONFIGS.values();
Set<String> paths = new HashSet<String>();
Set<String> pathsCurrent;
for (Configuration config : configs) {
pathsCurrent = config.getDataSourcePath();
if (config.isWatchStatus() && CollectionUtils.valid(pathsCurrent)) {
paths.addAll(pathsCurrent);
}
}
return paths;
}
/**
* Checks and gets appropriated {@link WatchFileType} by passed file name
*
* @param fileName
* @return {@link WatchFileType}
*/
private static WatchFileType checkType(String fileName) {
WatchFileType type;
File file = new File(fileName);
String path = file.getPath();
String filePath = WatchUtils.clearPath(path);
path = file.getParent();
String parentPath = WatchUtils.clearPath(path);
Set<DeploymentDirectory> apps = getDeployDirectories();
Set<String> dss = getDataSourcePaths();
if (CollectionUtils.valid(apps)) {
String deploymentPath;
Iterator<DeploymentDirectory> iterator = apps.iterator();
boolean notDeployment = Boolean.TRUE;
DeploymentDirectory deployment;
while (iterator.hasNext() && notDeployment) {
deployment = iterator.next();
deploymentPath = deployment.getPath();
notDeployment = ObjectUtils.notEquals(deploymentPath,
parentPath);
}
if (notDeployment) {
type = WatchFileType.NONE;
} else {
type = WatchFileType.DEPLOYMENT;
}
} else if (CollectionUtils.valid(dss) && dss.contains(filePath)) {
type = WatchFileType.DATA_SOURCE;
} else {
type = WatchFileType.NONE;
}
return type;
}
private static void fillFileList(File[] files, List<File> list) {
if (CollectionUtils.valid(files)) {
for (File file : files) {
list.add(file);
}
}
}
/**
* Lists all deployed {@link File}s
*
* @return {@link List}<File>
*/
public static List<File> listDeployments() {
Collection<Configuration> configs = MetaContainer.CONFIGS.values();
Set<DeploymentDirectory> deploymentDirs = new HashSet<DeploymentDirectory>();
Set<DeploymentDirectory> currentDirs;
for (Configuration config : configs) {
currentDirs = config.getDeploymentPath();
if (CollectionUtils.valid(currentDirs)) {
deploymentDirs.addAll(currentDirs);
}
}
File[] files;
List<File> list = new ArrayList<File>();
if (CollectionUtils.valid(deploymentDirs)) {
String path;
DeployFilter filter = new DeployFilter();
for (DeploymentDirectory deployment : deploymentDirs) {
path = deployment.getPath();
files = new File(path).listFiles(filter);
fillFileList(files, list);
}
}
return list;
}
/**
* Lists all data source {@link File}s
*
* @return {@link List}<File>
*/
public static List<File> listDataSources() {
Collection<Configuration> configs = MetaContainer.CONFIGS.values();
Set<String> paths = new HashSet<String>();
Set<String> pathsCurrent;
for (Configuration config : configs) {
pathsCurrent = config.getDataSourcePath();
if (CollectionUtils.valid(pathsCurrent)) {
paths.addAll(pathsCurrent);
}
}
File file;
List<File> list = new ArrayList<File>();
if (CollectionUtils.valid(paths)) {
for (String path : paths) {
file = new File(path);
list.add(file);
}
}
return list;
}
/**
* Deploys application or data source file by passed file name
*
* @param fileName
* @throws IOException
*/
public static void deployFile(String fileName) throws IOException {
WatchFileType type = checkType(fileName);
if (type.equals(WatchFileType.DATA_SOURCE)) {
FileParsers fileParsers = new FileParsers();
fileParsers.parseStandaloneXml(fileName);
} else if (type.equals(WatchFileType.DEPLOYMENT)) {
URL url = getAppropriateURL(fileName);
deployFile(url);
}
}
/**
* Deploys application or data source file by passed {@link URL} instance
*
* @param url
* @throws IOException
*/
public static void deployFile(URL url) throws IOException {
URL[] archives = { url };
MetaContainer.getCreator().scanForBeans(archives);
}
/**
* Removes from deployments application or data source file by passed
* {@link URL} instance
*
* @param url
* @throws IOException
*/
public static void undeployFile(URL url) throws IOException {
boolean valid = MetaContainer.undeploy(url);
if (valid && RestContainer.hasRest()) {
RestProvider.reload();
}
}
/**
* Removes from deployments application or data source file by passed file
* name
*
* @param fileName
* @throws IOException
*/
public static void undeployFile(String fileName) throws IOException {
WatchFileType type = checkType(fileName);
if (type.equals(WatchFileType.DATA_SOURCE)) {
Initializer.undeploy(fileName);
} else if (type.equals(WatchFileType.DEPLOYMENT)) {
URL url = getAppropriateURL(fileName);
undeployFile(url);
}
}
/**
* Removes from deployments and deploys again application or data source
* file by passed file name
*
* @param fileName
* @throws IOException
*/
public static void redeployFile(String fileName) throws IOException {
undeployFile(fileName);
deployFile(fileName);
}
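/*
 * Usage sketch (file paths are placeholders):
 *
 *   Watcher.startWatch();                       // watch configured directories
 *   Watcher.deployFile("/deploy/orders.jar");   // manual deployment by file name
 *   Watcher.redeployFile("/deploy/orders.jar"); // undeploy and deploy again
 */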
/**
* Handles file change event
*
* @param dir
* @param currentEvent
* @throws IOException
*/
private void handleEvent(Path dir, WatchEvent<Path> currentEvent)
throws IOException {
if (ObjectUtils.notNull(currentEvent)) {
Path prePath = currentEvent.context();
Path path = dir.resolve(prePath);
String fileName = path.toString();
int count = currentEvent.count();
Kind<?> kind = currentEvent.kind();
if (kind == StandardWatchEventKinds.ENTRY_MODIFY) {
LogUtils.info(LOG, "Modify: %s, count: %s\n", fileName, count);
redeployFile(fileName);
} else if (kind == StandardWatchEventKinds.ENTRY_DELETE) {
LogUtils.info(LOG, "Delete: %s, count: %s\n", fileName, count);
undeployFile(fileName);
} else if (kind == StandardWatchEventKinds.ENTRY_CREATE) {
LogUtils.info(LOG, "Create: %s, count: %s\n", fileName, count);
redeployFile(fileName);
}
}
}
/**
* Runs file watch service
*
* @param watch
* @throws IOException
*/
private void runService(WatchService watch) throws IOException {
Path dir;
boolean toRun = true;
boolean valid;
while (toRun) {
try {
WatchKey key;
key = watch.take();
List<WatchEvent<?>> events = key.pollEvents();
WatchEvent<?> currentEvent = null;
WatchEvent<Path> typedCurrentEvent;
int times = 0;
dir = (Path) key.watchable();
for (WatchEvent<?> event : events) {
if (event.kind() == StandardWatchEventKinds.OVERFLOW) {
continue;
}
if (times == 0 || event.count() > currentEvent.count()) {
currentEvent = event;
}
times++;
valid = key.reset();
toRun = valid && key.isValid();
if (toRun) {
Thread.sleep(SLEEP_TIME);
typedCurrentEvent = ObjectUtils.cast(currentEvent);
handleEvent(dir, typedCurrentEvent);
}
}
} catch (InterruptedException ex) {
throw new IOException(ex);
}
}
}
/**
* Registers path to watch service
*
* @param fs
* @param path
* @param watch
* @throws IOException
*/
private void registerPath(FileSystem fs, String path, WatchService watch)
throws IOException {
Path deployPath = fs.getPath(path);
deployPath.register(watch, StandardWatchEventKinds.ENTRY_CREATE,
StandardWatchEventKinds.ENTRY_MODIFY,
StandardWatchEventKinds.OVERFLOW,
StandardWatchEventKinds.ENTRY_DELETE);
runService(watch);
}
/**
* Registers passed {@link File} array to watch service
*
* @param files
* @param fs
* @param watch
* @throws IOException
*/
private void registerPaths(File[] files, FileSystem fs, WatchService watch)
throws IOException {
String path;
for (File file : files) {
path = file.getPath();
registerPath(fs, path, watch);
}
}
/**
* Registers deployments directories to watch service
*
* @param deploymentDirss
* @param fs
* @param watch
* @throws IOException
*/
private void registerPaths(Collection<DeploymentDirectory> deploymentDirss,
FileSystem fs, WatchService watch) throws IOException {
String path;
boolean scan;
File directory;
File[] files;
for (DeploymentDirectory deployment : deploymentDirss) {
path = deployment.getPath();
scan = deployment.isScan();
if (scan) {
directory = new File(path);
files = directory.listFiles();
if (CollectionUtils.valid(files)) {
registerPaths(files, fs, watch);
}
} else {
registerPath(fs, path, watch);
}
}
}
/**
* Registers data source path to watch service
*
* @param paths
* @param fs
* @param watch
* @throws IOException
*/
private void registerDsPaths(Collection<String> paths, FileSystem fs,
WatchService watch) throws IOException {
for (String path : paths) {
registerPath(fs, path, watch);
}
}
@Override
public void run() {
try {
FileSystem fs = FileSystems.getDefault();
WatchService watch = null;
try {
watch = fs.newWatchService();
} catch (IOException ex) {
LOG.error(ex.getMessage(), ex);
throw ex;
}
if (CollectionUtils.valid(deployments)) {
registerPaths(deployments, fs, watch);
}
if (CollectionUtils.valid(dataSources)) {
registerDsPaths(dataSources, fs, watch);
}
} catch (IOException ex) {
LOG.fatal(ex.getMessage(), ex);
LOG.fatal("system going to shut down cause of hot deployment");
try {
ConnectionContainer.closeConnections();
} catch (IOException iex) {
LOG.fatal(iex.getMessage(), iex);
}
System.exit(-1);
} finally {
DEPLOY_POOL.shutdown();
}
}
/**
* Starts watch service for application and data source files
*/
public static void startWatch() {
Watcher watcher = new Watcher();
DEPLOY_POOL.submit(watcher);
}
}
|
package org.lightmare.utils;
import java.io.Closeable;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Utility class to help with general object checks
*
* @author levan
*
*/
public class ObjectUtils {
public static final int EMPTY_ARRAY_LENGTH = 0;
public static final Object[] EMPTY_ARRAY = {};
public static final int FIRST_INDEX = 0;
public static final int SECOND_INDEX = 1;
/**
* Checks if passed boolean value is not true
*
* @param statement
* @return <code>boolean</code>
*/
public static boolean notTrue(boolean statement) {
return !statement;
}
/**
 * Checks if the passed Boolean value is false
*
* @param data
* @return <code>boolean</code>
*/
public static boolean isFalse(Boolean data) {
return !data;
}
/**
* Checks if passed object is not null
*
* @param data
* @return <code>boolean</code>
*/
public static boolean notNull(Object data) {
return (data != null);
}
/**
 * Checks that none of the passed objects is null
*
* @param datas
* @return <code>boolean</code>
*/
public static boolean notNullAll(Object... datas) {
boolean valid = notNull(datas);
if (valid) {
int length = datas.length;
for (int i = 0; i < length && valid; i++) {
valid = notNull(datas[i]);
}
}
return valid;
}
/**
* Checks if passed {@link Collection} instance is not empty
*
* @param collection
* @return <code>boolean</code>
*/
public static boolean notEmpty(Collection<?> collection) {
return !collection.isEmpty();
}
/**
* Checks the passed {@link Collection} instance for null and emptiness;
* returns true if it is not null and not empty
*
* @param collection
* @return <code>boolean</code>
*/
public static boolean available(Collection<?> collection) {
return collection != null && !collection.isEmpty();
}
/**
* Checks the passed {@link Map} instance for null and emptiness;
* returns true if it is not null and not empty
*
* @param map
* @return <code>boolean</code>
*/
public static boolean available(Map<?, ?> map) {
return map != null && !map.isEmpty();
}
/**
* Checks if passed {@link Map} instance is null or is empty
*
* @param map
* @return <code>boolean</code>
*/
public static boolean notAvailable(Map<?, ?> map) {
return !available(map);
}
/**
* Checks if passed {@link Collection} instance is null or is empty
*
* @param collection
* @return <code>boolean</code>
*/
public static boolean notAvailable(Collection<?> collection) {
return !available(collection);
}
/**
* Checks if any of the passed {@link Collection} instances is null or empty
*
* @param collections
* @return <code>boolean</code>
*/
public static boolean notAvailable(Collection<?>... collections) {
return !availableAll(collections);
}
public static boolean availableAll(Map<?, ?>... maps) {
boolean valid = notNull(maps);
if (valid) {
Map<?, ?> map;
for (int i = 0; i < maps.length && valid; i++) {
map = maps[i];
valid = valid && available(map);
}
}
return valid;
}
public static boolean available(Object[] array) {
return array != null && array.length > 0;
}
public static boolean notAvailable(Object[] array) {
return !available(array);
}
public static boolean available(CharSequence chars) {
return chars != null && chars.length() > 0;
}
public static boolean notAvailable(CharSequence chars) {
return !available(chars);
}
public static boolean availableAll(Collection<?>... collections) {
boolean valid = notNull(collections);
if (valid) {
Collection<?> collection;
for (int i = 0; i < collections.length && valid; i++) {
collection = collections[i];
valid = valid && available(collection);
}
}
return valid;
}
public static boolean availableAll(Object[]... arrays) {
boolean valid = notNull(arrays);
if (valid) {
Object[] array;
for (int i = 0; i < arrays.length && valid; i++) {
array = arrays[i];
valid = valid && available(array);
}
}
return valid;
}
/**
* Converts passed {@link Collection} to an array of the appropriate {@link Class}
* type
*
* @param collection
* @param type
* @return <code>T[]</code>
*/
@SuppressWarnings("unchecked")
public static <T> T[] toArray(Collection<T> collection, Class<T> type) {
T[] array;
if (notNull(collection)) {
array = (T[]) Array.newInstance(type, collection.size());
array = collection.toArray(array);
} else {
array = null;
}
return array;
}
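// Usage sketch (illustrative only, not part of the original sources); the list
// contents below are hypothetical:
//
//   List<String> names = Arrays.asList("a", "b");
//   String[] asArray = ObjectUtils.toArray(names, String.class); // -> {"a", "b"}
//   String[] nothing = ObjectUtils.toArray(null, String.class);  // -> null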
/**
* Creates empty array of passed type
*
* @param type
* @return <code>T[]</code>
*/
public static <T> T[] emptyArray(Class<T> type) {
@SuppressWarnings("unchecked")
T[] empty = (T[]) Array.newInstance(type, EMPTY_ARRAY_LENGTH);
return empty;
}
/**
* Peeks first element from list
*
* @param list
* @return T
*/
private static <T> T getFirstFromList(List<T> list) {
T value;
if (available(list)) {
value = list.get(FIRST_INDEX);
} else {
value = null;
}
return value;
}
/**
* Peeks first element from collection
*
* @param collection
* @return T
*/
public static <T> T getFirst(Collection<T> collection) {
T value;
if (available(collection)) {
if (collection instanceof List) {
value = getFirstFromList(((List<T>) collection));
} else {
Iterator<T> iterator = collection.iterator();
value = iterator.next();
}
} else {
value = null;
}
return value;
}
/**
* Peeks first element from array
*
* @param values
* @return T
*/
public static <T> T getFirst(T[] values) {
T value;
if (available(values)) {
value = values[FIRST_INDEX];
} else {
value = null;
}
return value;
}
/**
* Gets value from passed {@link Map} as other {@link Map} instance
*
* @param key
* @param from
* @return {@link Map}<K,V>
*/
@SuppressWarnings("unchecked")
public static <K, V> Map<K, V> getAsMap(Object key, Map<?, ?> from) {
Map<K, V> result;
if (ObjectUtils.available(from)) {
Object objectValue = from.get(key);
if (objectValue instanceof Map) {
result = (Map<K, V>) objectValue;
} else {
result = null;
}
} else {
result = null;
}
return result;
}
/**
* Gets values from passed {@link Map} as other {@link Map} instance
* recursively by passed keys array
*
* @param from
* @param keys
* @return {@link Map}
*/
public static Map<?, ?> getAsMap(Map<?, ?> from, Object... keys) {
Map<?, ?> result = from;
int length = keys.length;
Object key;
for (int i = 0; i < length && ObjectUtils.notNull(result); i++) {
key = keys[i];
result = getAsMap(key, result);
}
return result;
}
/**
* Navigates the passed {@link Map} through nested {@link Map} instances using all
* but the last key, then returns the value mapped to the last key in the innermost
* {@link Map} instance
*
* @param from
* @param keys
* @return <code>V</code>
*/
@SuppressWarnings("unchecked")
public static <V> V getSubValue(Map<?, ?> from, Object... keys) {
V value;
int length = keys.length - 1;
Object[] subKeys = new Object[length];
Object key = keys[length];
for (int i = 0; i < length; i++) {
subKeys[i] = keys[i];
}
Map<?, ?> result = getAsMap(from, subKeys);
if (ObjectUtils.available(result)) {
value = (V) result.get(key);
} else {
value = null;
}
return value;
}
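// Usage sketch (illustrative only); the keys and values below are hypothetical:
//
//   Map<String, Object> inner = new HashMap<>();
//   inner.put("port", 8080);
//   Map<String, Object> outer = new HashMap<>();
//   outer.put("server", inner);
//   Integer port = ObjectUtils.getSubValue(outer, "server", "port"); // -> 8080
//   Object missing = ObjectUtils.getSubValue(outer, "db", "port");   // -> null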
/**
* Checks if passed {@link Closeable} instance is not null and if not calls
* {@link Closeable#close()} method
*
* @param closeable
* @throws IOException
*/
public static void close(Closeable closeable) throws IOException {
if (ObjectUtils.notNull(closeable)) {
closeable.close();
}
}
/**
* Checks if passed array of {@link Closeable}'s is valid and closes all of
* them
*
* @param closeables
* @throws IOException
*/
public static void closeAll(Closeable... closeables) throws IOException {
if (available(closeables)) {
for (Closeable closeable : closeables) {
close(closeable);
}
}
}
}
|
package org.lima.parser.sstax;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.lima.parser.sstax.element.XMLListElement;
import org.lima.parser.sstax.element.XMLMapElement;
import org.lima.parser.sstax.element.XMLObjectElement;
import org.lima.parser.sstax.element.XMLPrimitiveElement;
import org.lima.parser.sstax.element.XMLStringElement;
import org.lima.parser.sstax.util.GenericUtil;
public class XMLParser {
private XMLStreamReader xr = null;
private int depth = 0;
public XMLParser(String file) throws FileNotFoundException, XMLStreamException {
XMLInputFactory factory = XMLInputFactory.newInstance();
this.xr = factory.createXMLStreamReader(new FileInputStream(new File(file)));
this.depth = 0;
}
public String parseString(String key) throws Exception {
if(seekNext(key)) {
return new XMLStringElement().parse(this);
}
return null;
}
public Integer parseInteger(String key) throws Exception {
if(seekNext(key)) {
return new XMLPrimitiveElement<Integer>(Integer.class).parse(this);
}
return null;
}
public Float parseFloat(String key) throws Exception {
if(seekNext(key)) {
return new XMLPrimitiveElement<Float>(Float.class).parse(this);
}
return null;
}
public Double parseDouble(String key) throws Exception {
if(seekNext(key)) {
return new XMLPrimitiveElement<Double>(Double.class).parse(this);
}
return null;
}
public Long parseLong(String key) throws Exception {
if(seekNext(key)) {
return new XMLPrimitiveElement<Long>(Long.class).parse(this);
}
return null;
}
public Boolean parseBoolean(String key) throws Exception {
if(seekNext(key)) {
return new XMLPrimitiveElement<Boolean>(Boolean.class).parse(this);
}
return null;
}
public Map<String, Object> parseMap(String key) throws Exception {
if(seekNext(key)) {
return new XMLMapElement(new XMLStringElement()).parse(this);
}
return null;
}
public <E> List<E> parseList(String key, Class<E> clazz) throws Exception {
XMLListElement<E> listElement = getListElement(key, clazz);
if(listElement != null) {
return listElement.parse(this);
}
return null;
}
public <E> E parseObject(String key, Class<E> clazz) throws Exception {
if(seekNext(key)) {
return new XMLObjectElement<E>(clazz).parse(this);
}
return null;
}
public class XMLListIterator<E> implements Iterator<E> {
private XMLParser parser;
private XMLListElement<E> listElement;
private E parsedElement;
private XMLListIterator (XMLParser parser, XMLListElement<E> listElement) {
this.parser = parser;
this.listElement = listElement;
this.parsedElement = null;
}
public boolean hasNext() {
if(parsedElement != null) {
return true;
}
else {
try {
parsedElement = listElement.parseNext(parser);
} catch (XMLStreamException e) {
e.printStackTrace();
}
}
return parsedElement != null;
}
public E next() {
E nextElement = null;
if(parsedElement != null) {
nextElement = parsedElement;
parsedElement = null;
}
else {
try {
nextElement = listElement.parseNext(parser);
} catch (XMLStreamException e) {
e.printStackTrace();
}
}
return nextElement;
}
public void remove() { }
}
public <E> Iterator<E> getListIterator(String key, Class<E> clazz) throws Exception {
Iterator<E> xmlListIterator = null;
XMLListElement<E> listElement = getListElement(key, clazz);
if(listElement != null) {
xmlListIterator = new XMLListIterator<E>(this, listElement);
}
return xmlListIterator;
}
private <E> XMLListElement<E> getListElement(String key, Class<E> clazz) throws Exception {
if(seekNext(key)) {
if(Collection.class.isAssignableFrom(clazz)
|| Map.class.isAssignableFrom(clazz)) {
throw new Exception("not support type: " + clazz);
}
else if(GenericUtil.isPrimitiveType(clazz)) {
return new XMLListElement<E>(null, new XMLPrimitiveElement<E>(clazz));
}
else {
return new XMLListElement<E>(null, new XMLObjectElement<E>(clazz));
}
}
return null;
}
public XMLStreamReader getReader() {
return xr;
}
public int getDepth() {
return depth;
}
public int incDepth() {
return ++depth;
}
public int decDepth() {
return --depth;
}
private boolean seekNext(String key) throws XMLStreamException {
while(xr.hasNext()) {
switch(xr.next()) {
case XMLStreamConstants.START_ELEMENT:
depth++;
if(key.equals(xr.getLocalName())) {
return true;
}
break;
case XMLStreamConstants.END_ELEMENT:
depth--;
break;
}
}
return false;
}
}
|
package org.sameas.sameas4j.cache;
/**
*
* @version $Id$
*/
public interface Cache {
<T> void put(CacheKey cacheKey, T cacheValue);
<T> T get(CacheKey cacheKey);
}
|
package org.scribe.builder.api;
import org.scribe.model.Token;
public class PlurkApi extends DefaultApi10a
{
private static final String REQUEST_TOKEN_URL = "http://www.plurk.com/OAuth/request_token";
private static final String AUTHORIZE_URL = "http://www.plurk.com/OAuth/authorize?oauth_token=%s";
private static final String ACCESS_TOKEN_URL = "http://www.plurk.com/OAuth/access_token";
@Override
public String getRequestTokenEndpoint()
{
return REQUEST_TOKEN_URL;
}
@Override
public String getAuthorizationUrl(Token requestToken)
{
return String.format(AUTHORIZE_URL, requestToken.getToken());
}
@Override
public String getAccessTokenEndpoint()
{
return ACCESS_TOKEN_URL;
}
}
|
package org.sd.atn;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.sd.token.Token;
import org.sd.util.Usage;
import org.sd.util.tree.NodePath;
import org.sd.util.tree.Tree;
import org.sd.xml.DomElement;
import org.sd.xml.DomNode;
/**
* AtnParseSelector that chooses the longest parse.
* <p>
* @author Spence Koehler
*/
@Usage(notes =
"An org.sd.atn.AtnParseSelector implementation that\n" +
"chooses the longest parse(s), meaning those that cover the\n" +
"most input"
)
public class LongestParseSelector implements AtnParseSelector {
private boolean simplest;
private boolean onlyfirst;
private NodePath<String> weightPath;
private List<NodePath<String>> preferPaths;
/**
* Attribute 'simplest' (default 'true') accepts only longest parses with
* the fewest number of nodes in their parse tree.
* <p>
* Attribute 'onlyfirst' (default 'false') accepts only the first (longest/
* simplest) parse.
* <p>
* Attribute 'weight' (default null) chooses the parse with the "weightier"
* parse (more characters) under the given node path.
* <p>
* Attribute 'prefer' (default null) specifies preference order of parses
* having (comma delimited) node path matches.
*/
public LongestParseSelector(DomNode domNode, ResourceManager resourceManager) {
final DomElement domElement = (DomElement)domNode;
this.simplest = domElement.getAttributeBoolean("simplest", true);
this.onlyfirst = domElement.getAttributeBoolean("onlyfirst", false);
this.weightPath = null;
this.preferPaths = buildNodePaths(domElement.getAttributeValue("prefer", null));
final String weightString = domElement.getAttributeValue("weight", null);
if (weightString != null) {
this.weightPath = new NodePath<String>(weightString);
}
}
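// Configuration sketch (hypothetical element name and attribute values, shown only to
// illustrate the attributes documented above; the actual element name and node path
// syntax depend on the surrounding parser configuration):
//
//   <parseSelector simplest='true' onlyfirst='false'
//                  weight='parse.date' prefer='parse.date,parse.time'/>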
LongestParseSelector(boolean simplest, boolean onlyfirst) {
this.simplest = simplest;
this.onlyfirst = onlyfirst;
this.weightPath = null;
}
protected boolean getSimplest() {
return simplest;
}
protected void setSimplest(boolean simplest) {
this.simplest = simplest;
}
protected boolean getOnlyFirst() {
return onlyfirst;
}
protected void setOnlyFirst(boolean onlyfirst) {
this.onlyfirst = onlyfirst;
}
public List<AtnParse> selectParses(AtnParseResult parseResult) {
final List<ParseData> parseDatas = new ArrayList<ParseData>();
for (int parseIndex = 0; parseIndex < parseResult.getNumParses(); ++parseIndex) {
final AtnParse parse = parseResult.getParse(parseIndex);
if (!parse.getSelected()) continue;
parseDatas.add(new ParseData(parse, simplest, weightPath, preferPaths));
}
try {
Collections.sort(parseDatas);
}
catch (IllegalArgumentException e) {
//TODO: simplify ParseData compare so this doesn't happen.
// in the meantime, it seems to happen in inconsequential cases.
final boolean stopHere = true;
}
final Set<Integer> selected = new HashSet<Integer>();
ParseData lastParseData = null;
for (ParseData parseData : parseDatas) {
if (lastParseData == null) {
selected.add(parseData.getAtnParse().getParseNum());
}
else if (parseData.compareTo(lastParseData) != 0) {
break;
}
else {
selected.add(parseData.getAtnParse().getParseNum());
}
lastParseData = parseData;
}
final List<AtnParse> result = new ArrayList<AtnParse>();
String note = "Not longest";
// collect non-duplicate selected parses
for (int parseIndex = 0; parseIndex < parseResult.getNumParses(); ++parseIndex) {
final AtnParse parse = parseResult.getParse(parseIndex);
boolean select = selected.contains(parse.getParseNum());
if (select && isDuplicate(parse, result)) {
note = "Is duplicate";
select = false;
}
if (onlyfirst && select && result.size() > 0) {
note = "Isn't first";
select = false;
}
parse.setSelected(select);
if (select) result.add(parse);
else parse.addNote(note);
}
return result;
}
private static final boolean isDuplicate(AtnParse parse, List<AtnParse> parses) {
boolean result = false;
final Tree<String> parseTree = parse.getParseTree();
for (AtnParse curParse : parses) {
final Tree<String> curParseTree = curParse.getParseTree();
if (parseTree.equals(curParseTree)) {
result = true;
break;
}
}
return result;
}
private static final List<NodePath<String>> buildNodePaths(String pathString) {
List<NodePath<String>> result = null;
if (pathString != null && !"".equals(pathString)) {
result = new ArrayList<NodePath<String>>();
final String[] paths = pathString.split("\\s*,\\s*");
for (String path : paths) {
result.add(new NodePath<String>(path));
}
}
return result;
}
private static final class ParseData implements Comparable<ParseData> {
private AtnParse atnParse;
private boolean simplest;
private NodePath<String> weightPath;
private List<NodePath<String>> preferData;
private List<Token> skipTokens;
private int skipCount;
private int length;
private int complexity; // the lower, the simpler
private int diversity; // num different tags
private int weight; // prefer higher
private int preferValue; // prefer lower (when >= 0)
private int prefPerplexity; // require equality when simplest + prefer
private boolean computedRulePattern = false;
private String _rulePattern = null;
ParseData(AtnParse atnParse, boolean simplest, NodePath<String> weightPath, List<NodePath<String>> preferData) {
this.atnParse = atnParse;
this.simplest = simplest;
this.weightPath = weightPath;
this.preferData = preferData;
this.skipTokens = null;
this.skipCount = 0;
this.length = 0;
this.complexity = 0;
this.diversity = 0;
this.weight = 0;
this.preferValue = -1;
this.prefPerplexity = 0;
initialize();
}
private final void initialize() {
this.length = atnParse.getEndIndex() - atnParse.getStartIndex();
for (Tree<AtnState> stateNode = atnParse.getEndState(); stateNode != null; stateNode = stateNode.getParent()) {
final AtnState pathState = stateNode.getData();
if (pathState == null) break;
++complexity;
if (pathState.isSkipped()) {
final Token skipToken = pathState.getInputToken();
if (skipTokens == null) skipTokens = new ArrayList<Token>();
skipTokens.add(skipToken);
skipCount += skipToken.getWordCount();
}
}
if (weightPath != null) {
final Tree<String> parseTree = atnParse.getParseTree();
final Set<String> tagNames = new HashSet<String>();
for (Tree<String> leafNode : parseTree.gatherLeaves()) {
tagNames.add(leafNode.getParent().getData());
}
this.diversity = tagNames.size();
final List<Tree<String>> weightedNodes = weightPath.apply(parseTree);
if (weightedNodes != null) {
for (Tree<String> weightedNode : weightedNodes) {
weight += weightedNode.gatherLeaves().size();
}
}
}
if (preferData != null) {
final Tree<String> parseTree = atnParse.getParseTree();
int idx = 0;
for (NodePath<String> preferPath : preferData) {
final List<Tree<String>> preferNodes = preferPath.apply(parseTree);
if (preferNodes != null) {
preferValue = idx;
prefPerplexity = computePerplexity(preferNodes);
break;
}
++idx;
}
}
}
private final int computePerplexity(List<Tree<String>> preferNodes) {
// defined as the minimum number of siblings for any (selected) node
// used as measure of "complexity" in context of preferred nodes
int result = Integer.MAX_VALUE;
for (Tree<String> preferNode : preferNodes) {
final int curResult = preferNode.getNumSiblings();
if (curResult < result) {
result = curResult;
}
}
return result;
}
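// Worked example (illustrative): if the preferred path selects two nodes having 4 and
// 1 siblings respectively, the perplexity is min(4, 1) = 1.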
public String toString() {
return atnParse.getParseTree().toString();
}
AtnParse getAtnParse() {
return atnParse;
}
public List<Token> getSkipTokens() {
return skipTokens;
}
public int getSkipCount() {
return skipCount;
}
public int getLength() {
return length;
}
public int getComplexity() {
return complexity;
}
public int getDiversity() {
return diversity;
}
public int getWeight() {
return weight;
}
public boolean equals(Object o) {
boolean result = (this == o);
if (!result && o instanceof ParseData) {
final ParseData other = (ParseData)o;
result = (this.compareTo(other) == 0);
}
return result;
}
public int hashCode() {
// rulePattern, simplest, skipCount, length, complexity, diversity, weight, preferValue
int result = 1;
final String rulePattern = getRulePattern();
if (rulePattern != null) {
result = (result * 17) + rulePattern.hashCode();
}
if (simplest) {
result = (result * 17) + 1;
}
result = (result * 17) + skipCount;
result = (result * 17) + length;
result = (result * 17) + complexity;
result = (result * 17) + diversity;
result = (result * 17) + weight;
result = (result * 17) + preferValue;
return result;
}
public int compareTo(ParseData other) {
int result = this == other ? 0 : -1;
if (result != 0) {
final boolean rulePatternMatch =
this.weight > 0 && other.weight > 0 &&
hasMatchingRulePattern(other) &&
diversity == other.getDiversity();
if (length == other.getLength() &&
(!simplest || complexity == other.getComplexity()) &&
skipCount == other.getSkipCount() &&
(!rulePatternMatch || weight == other.getWeight())) {
result = getPrefCompare(other);
}
else {
// the parse that skips fewer tokens comes first
result = compareSkips(other);
// when still unresolved, the longest parse comes first
if (result == 0) {
result = other.getLength() - length;
// when still unresolved, check preference order of parses
if (result == 0 && preferData != null) {
result = getPrefCompare(other);
}
// when still unresolved, the minimum complexity comes first
if (result == 0 && simplest) {
result = this.complexity - other.getComplexity();
}
// when still unresolved, the highest weight comes first
// but only in the context of a matching rule pattern (id or ruleName)
if (result == 0 && rulePatternMatch) {
result = other.getWeight() - this.weight;
}
}
}
}
return result;
}
private final int getPrefCompare(ParseData other) {
int result = 0;
if (preferData != null) {
if (this.preferValue >= 0 && other.preferValue >= 0) {
// as long as !simplest or prefPerplexities match,
if (!simplest || this.prefPerplexity == other.prefPerplexity) {
// lower prefer value comes first
result = this.preferValue - other.preferValue;
}
}
}
return result;
}
private final String getRulePattern() {
if (!computedRulePattern) {
final AtnRule rule = atnParse.getStartRule();
_rulePattern = rule.getRuleId();
if (_rulePattern == null) {
// fall back to rule name if no ID
_rulePattern = rule.getRuleName();
}
computedRulePattern = true;
}
return _rulePattern;
}
private final boolean hasMatchingRulePattern(ParseData other) {
final String pattern1 = this.getRulePattern();
final String pattern2 = other.getRulePattern();
final boolean result = (pattern1 == pattern2) || (pattern1 != null && pattern1.equals(pattern2));
return result;
}
private final int compareSkips(ParseData other) {
int result = 0;
if (skipCount > 0 || other.skipCount > 0) {
if (skipCount > 0 && other.skipCount > 0) {
final boolean otherReducesThis = reducesSkipCount(other);
final boolean thisReducesOther = other.reducesSkipCount(this);
if (otherReducesThis && !thisReducesOther) {
// prefer other
result = 1;
}
else if (thisReducesOther && !otherReducesThis) {
// prefer this
result = -1;
}
// else, no preference
}
else if (skipCount > 0) {
// if the other parse reduces this' skip count, other is better (+1)
if (reducesSkipCount(other)) {
result = 1;
}
}
else { // other.skipCount > 0
// if this reduces other's skip count, this is better (-1)
if (other.reducesSkipCount(this)) {
result = -1;
}
}
}
return result;
}
private final boolean reducesSkipCount(ParseData other) {
boolean result = false;
if (skipCount > 0) {
for (Token skipToken : skipTokens) {
final int overlap = other.overlapFlag(skipToken);
if (overlap != 1) {
result = true;
break;
}
}
}
return result;
}
/**
* Return
* <ul>
* <li>0 if token is fully covered by this parse;</li>
* <li>1 if token is fully outside this parse;</li>
* <li>-1 if token partially overlaps with this parse</li>
* </ul>
*/
private final int overlapFlag(Token token) {
int result = 0;
final int myStartIndex = atnParse.getStartIndex();
final int myEndIndex = atnParse.getEndIndex();
final int tokenStartIndex = token.getStartIndex();
final int tokenEndIndex = token.getEndIndex();
if (Token.encompasses(myStartIndex, myEndIndex, tokenStartIndex, tokenEndIndex)) {
// count how many of token's words are not skipped by this
if (skipCount == 0) {
// nothing is skipped, so all are covered
result = 0;
}
else {
// if any part of the token is skipped, then not all are covered
for (Token skipToken : skipTokens) {
if (skipToken.encompasses(token)) {
// token is entirely skipped, so not covered
result = 1;
break;
}
else if (skipToken.overlaps(token)) {
// token is partially skipped, so not entirely covered
result = -1;
}
}
}
}
else if (Token.overlaps(myStartIndex, myEndIndex, tokenStartIndex, tokenEndIndex)) {
// token is not entirely covered by this (and not at all if overlap is skipped)
result = -1;
}
else {
// else no coverage
result = 1;
}
return result;
}
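// Worked example (illustrative, assuming start/end indexes behave as half-open
// character offsets): for a parse covering [5, 20) with no skipped tokens, a token
// spanning [7, 10) yields 0 (fully covered), [25, 30) yields 1 (fully outside), and
// [18, 25) yields -1 (partial overlap).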
}
}
|
package org.smoothbuild.lang.base;
import static java.util.stream.Collectors.joining;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
public class Scope<E> {
private final Scope<E> outerScope;
private final Map<String, ? extends E> bindings;
public Scope(Map<String, ? extends E> bindings) {
this(null, bindings);
}
public Scope(Scope<E> outerScope, Map<String, ? extends E> bindings) {
this.outerScope = outerScope;
this.bindings = bindings;
}
public boolean contains(String name) {
return bindings.containsKey(name) || (outerScope != null && outerScope.contains(name));
}
public E get(String name) {
if (bindings.containsKey(name)) {
return bindings.get(name);
}
if (outerScope != null) {
return outerScope.get(name);
}
throw new NoSuchElementException(name);
}
public Scope<E> outerScope() {
if (outerScope == null) {
throw new IllegalStateException("This is top level scope. It doesn't have outer scope.");
}
return outerScope;
}
public String namesToString() {
String outer = outerScope == null ? "" : outerScope.namesToString() + "\n";
String inner = prettyPrint(bindings.keySet());
return outer + inner;
}
@Override
public String toString() {
String outer = outerScope == null ? "" : outerScope.toString() + "\n";
String inner = prettyPrint(bindings.entrySet());
return outer + inner;
}
private String prettyPrint(Set<?> set) {
return set.stream()
.map(Object::toString)
.map(s -> indent() + s)
.collect(joining("\n"));
}
private String indent() {
return outerScope == null ? "" : outerScope.indent() + " ";
}
}
|
package code;
import java.awt.GridLayout;
import java.util.ArrayList;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.UIManager;
public class PlayerFrame {
private Scrabble _s;
private Inventory _inv;
private ArrayList<Player> _players;
private Board _board;
private int _numberOfPlayers;
public PlayerFrame(Scrabble s){
try {
UIManager.setLookAndFeel("com.sun.java.swing.plaf.motif.MotifLookAndFeel");
} catch(Exception e) {
e.printStackTrace();
}
_s = s;
JFrame frame = new JFrame("Tile Rack");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.setSize(200, 200);
GridLayout grid = new GridLayout(7, 0, 0, 0);
frame.setLayout(grid);
for(int i=0; i<7; i++){
JButton j = new TileSpace();
frame.add(j);
}
frame.setVisible(true);
}
public void update(){
_inv = _s.getInv();
_players = _s.getPlayer();
_board = _s.getBoard();
_numberOfPlayers = _s.getNumofPlayers();
}
}
|
package org.testng.internal;
import org.testng.IClass;
import org.testng.IMethodSelector;
import org.testng.IObjectFactory;
import org.testng.IObjectFactory2;
import org.testng.ITestObjectFactory;
import org.testng.TestNGException;
import org.testng.TestRunner;
import org.testng.annotations.IAnnotation;
import org.testng.annotations.IFactoryAnnotation;
import org.testng.annotations.IParametersAnnotation;
import org.testng.internal.annotations.IAnnotationFinder;
import org.testng.internal.annotations.Sets;
import org.testng.junit.IJUnitTestRunner;
import org.testng.xml.XmlTest;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
/**
* Utility class for different class manipulations.
*/
public final class ClassHelper {
private static final String JUNIT_TESTRUNNER= "org.testng.junit.JUnitTestRunner";
private static final String JUNIT_4_TESTRUNNER = "org.testng.junit.JUnit4TestRunner";
/** The additional class loaders to find classes in. */
private static final List<ClassLoader> m_classLoaders = new Vector<ClassLoader>();
/** Add a class loader to the searchable loaders. */
public static void addClassLoader(final ClassLoader loader) {
m_classLoaders.add(loader);
}
/** Hide constructor. */
private ClassHelper() {
// Hide Constructor
}
public static <T> T newInstance(Class<T> clazz) {
try {
T instance = clazz.newInstance();
return instance;
}
catch(IllegalAccessException iae) {
throw new TestNGException("Class " + clazz.getName()
+ " does not have a no-args constructor", iae);
}
catch(InstantiationException ie) {
throw new TestNGException("Cannot instantiate class " + clazz.getName(), ie);
}
catch(ExceptionInInitializerError eiierr) {
throw new TestNGException("An exception occurred in static initialization of class "
+ clazz.getName(), eiierr);
}
catch(SecurityException se) {
throw new TestNGException(se);
}
}
/**
* Tries to load the specified class using the context ClassLoader or, if none,
* then from the default ClassLoader. This method differs from the standard
* class loading methods in that it does not throw an exception if the class
* is not found but returns null instead.
*
* @param className the class name to be loaded.
*
* @return the class or null if the class is not found.
*/
public static Class<?> forName(final String className) {
Vector<ClassLoader> allClassLoaders = new Vector<ClassLoader>();
ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
if (contextClassLoader != null) {
allClassLoaders.add(contextClassLoader);
}
if (m_classLoaders != null) {
allClassLoaders.addAll(m_classLoaders);
}
for (ClassLoader classLoader : allClassLoaders) {
if (null == classLoader) {
continue;
}
try {
return classLoader.loadClass(className);
}
catch(ClassNotFoundException ex) {
// With additional class loaders, it is legitimate to ignore ClassNotFoundException
if (null == m_classLoaders || m_classLoaders.size() == 0) {
logClassNotFoundError(className, ex);
}
}
}
try {
return Class.forName(className);
}
catch(ClassNotFoundException cnfe) {
logClassNotFoundError(className, cnfe);
return null;
}
}
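// Usage sketch (illustrative; the class name is hypothetical):
//
//   Class<?> cls = ClassHelper.forName("com.example.MyTest");
//   if (cls == null) {
//     // not found by the context, registered, or default class loaders;
//     // no exception is thrown
//   }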
private static void logClassNotFoundError(String className, Exception ex) {
Utils.log("ClassHelper", 2, "Could not instantiate " + className
+ " : Class doesn't exist (" + ex.getMessage() + ")");
}
/**
* For the given class, returns the method annotated with @Factory or null
* if none is found. This method does not search up the superclass hierarchy.
* If more than one method is @Factory annotated, a TestNGException is thrown.
* @param cls The class to search for the @Factory annotation.
* @param finder The finder (JDK 1.4 or JDK 5.0+) used to search for the annotation.
*
* @return the @Factory <CODE>method</CODE> or null
*
* FIXME: @Factory method must be public!
*/
public static ConstructorOrMethod findDeclaredFactoryMethod(Class<?> cls,
IAnnotationFinder finder) {
ConstructorOrMethod result = null;
for (Method method : cls.getMethods()) {
IFactoryAnnotation f = finder.findAnnotation(method, IFactoryAnnotation.class);
if (null != f) {
result = new ConstructorOrMethod(method);
result.setEnabled(f.getEnabled());
break;
}
}
if (result == null) {
for (Constructor constructor : cls.getDeclaredConstructors()) {
IAnnotation f = finder.findAnnotation(constructor, IFactoryAnnotation.class);
if (f != null) {
result = new ConstructorOrMethod(constructor);
}
}
}
// If we didn't find anything, look for nested classes
// if (null == result) {
// Class[] subClasses = cls.getClasses();
// for (Class subClass : subClasses) {
// result = findFactoryMethod(subClass, finder);
// if (null != result) {
// break;
// Found the method, verify that it returns an array of objects
// TBD
return result;
}
/**
* Extract all callable methods of a class and all its superclasses (keeping in mind
* the Java access rules).
*
* @param clazz
* @return the set of callable methods declared by the class or inherited from its superclasses
*/
public static Set<Method> getAvailableMethods(Class<?> clazz) {
Set<Method> methods = Sets.newHashSet();
methods.addAll(Arrays.asList(clazz.getDeclaredMethods()));
Class<?> parent = clazz.getSuperclass();
while (Object.class != parent) {
methods.addAll(extractMethods(clazz, parent, methods));
parent = parent.getSuperclass();
}
return methods;
}
/**
* Creates a JUnit test runner, preferring JUnit 4 when it is on the classpath and
* falling back to JUnit 3.
*
* @param runner the TestNG test runner used as the test result notifier
* @return the JUnit test runner
*/
public static IJUnitTestRunner createTestRunner(TestRunner runner) {
try {
//try to get runner for JUnit 4 first
Class.forName("org.junit.Test");
IJUnitTestRunner tr = (IJUnitTestRunner) ClassHelper.forName(JUNIT_4_TESTRUNNER).newInstance();
tr.setTestResultNotifier(runner);
return tr;
} catch (Throwable t) {
Utils.log("ClassHelper", 2, "JUnit 4 was not found on the classpath");
try {
//fallback to JUnit 3
Class.forName("junit.framework.Test");
IJUnitTestRunner tr = (IJUnitTestRunner) ClassHelper.forName(JUNIT_TESTRUNNER).newInstance();
tr.setTestResultNotifier(runner);
return tr;
} catch (Exception ex) {
Utils.log("ClassHelper", 2, "JUnit 3 was not found on the classpath");
//there's no JUnit on the classpath
throw new TestNGException("Cannot create JUnit runner", ex);
}
}
}
private static Set<Method> extractMethods(Class<?> childClass, Class<?> clazz,
Set<Method> collected) {
Set<Method> methods = Sets.newHashSet();
Method[] declaredMethods = clazz.getDeclaredMethods();
Package childPackage = childClass.getPackage();
Package classPackage = clazz.getPackage();
boolean isSamePackage = false;
if ((null == childPackage) && (null == classPackage)) {
isSamePackage = true;
}
if ((null != childPackage) && (null != classPackage)) {
isSamePackage = childPackage.getName().equals(classPackage.getName());
}
for (Method method : declaredMethods) {
int methodModifiers = method.getModifiers();
if ((Modifier.isPublic(methodModifiers) || Modifier.isProtected(methodModifiers))
|| (isSamePackage && !Modifier.isPrivate(methodModifiers))) {
if (!isOverridden(method, collected) && !Modifier.isAbstract(methodModifiers)) {
methods.add(method);
}
}
}
return methods;
}
private static boolean isOverridden(Method method, Set<Method> collectedMethods) {
Class<?> methodClass = method.getDeclaringClass();
Class<?>[] methodParams = method.getParameterTypes();
for (Method m: collectedMethods) {
Class<?>[] paramTypes = m.getParameterTypes();
if (method.getName().equals(m.getName())
&& methodClass.isAssignableFrom(m.getDeclaringClass())
&& methodParams.length == paramTypes.length) {
boolean sameParameters = true;
for (int i= 0; i < methodParams.length; i++) {
if (!methodParams[i].equals(paramTypes[i])) {
sameParameters = false;
break;
}
}
if (sameParameters) {
return true;
}
}
}
return false;
}
public static IMethodSelector createSelector(org.testng.xml.XmlMethodSelector selector) {
try {
Class<?> cls = Class.forName(selector.getClassName());
return (IMethodSelector) cls.newInstance();
}
catch(Exception ex) {
throw new TestNGException("Couldn't find method selector : " + selector.getClassName(), ex);
}
}
/**
* Create an instance for the given class.
*/
public static Object createInstance(Class<?> declaringClass,
Map<Class, IClass> classes,
XmlTest xmlTest,
IAnnotationFinder finder,
ITestObjectFactory objectFactory)
{
if (objectFactory instanceof IObjectFactory) {
return createInstance1(declaringClass, classes, xmlTest, finder,
(IObjectFactory) objectFactory);
} else if (objectFactory instanceof IObjectFactory2) {
return createInstance2(declaringClass, (IObjectFactory2) objectFactory);
} else {
throw new AssertionError("Unknown object factory type:" + objectFactory);
}
}
private static Object createInstance2(Class<?> declaringClass, IObjectFactory2 objectFactory) {
return objectFactory.newInstance(declaringClass);
}
public static Object createInstance1(Class<?> declaringClass,
Map<Class, IClass> classes,
XmlTest xmlTest,
IAnnotationFinder finder,
IObjectFactory objectFactory) {
Object result = null;
try {
// Any annotated constructor?
Constructor<?> constructor = findAnnotatedConstructor(finder, declaringClass);
if (null != constructor) {
IParametersAnnotation annotation = finder.findAnnotation(constructor, IParametersAnnotation.class);
String[] parameterNames = annotation.getValue();
Object[] parameters = Parameters.createInstantiationParameters(constructor,
"@Parameters",
finder,
parameterNames,
xmlTest.getAllParameters(),
xmlTest.getSuite());
result = objectFactory.newInstance(constructor, parameters);
}
// No, just try to instantiate the parameterless constructor (or the one
// with a String)
else {
// If this class is a (non-static) nested class, the constructor contains a hidden
// parameter of the type of the enclosing class
Class<?>[] parameterTypes = new Class[0];
Object[] parameters = new Object[0];
Class<?> ec = getEnclosingClass(declaringClass);
boolean isStatic = 0 != (declaringClass.getModifiers() & Modifier.STATIC);
// Only add the extra parameter if the nested class is not static
if ((null != ec) && !isStatic) {
parameterTypes = new Class[] { ec };
// Create an instance of the enclosing class so we can instantiate
// the nested class (actually, we reuse the existing instance).
IClass enclosingIClass = classes.get(ec);
Object[] enclosingInstances;
if (null != enclosingIClass) {
enclosingInstances = enclosingIClass.getInstances(false);
if ((null == enclosingInstances) || (enclosingInstances.length == 0)) {
Object o = objectFactory.newInstance(ec.getConstructor(parameterTypes));
enclosingIClass.addInstance(o);
enclosingInstances = new Object[] { o };
}
}
else {
enclosingInstances = new Object[] { ec.newInstance() };
}
Object enclosingClassInstance = enclosingInstances[0];
// Utils.createInstance(ec, classes, xmlTest, finder);
parameters = new Object[] { enclosingClassInstance };
} // isStatic
Constructor<?> ct;
try {
ct = declaringClass.getDeclaredConstructor(parameterTypes);
}
catch (NoSuchMethodException ex) {
ct = declaringClass.getDeclaredConstructor(String.class);
parameters = new Object[] { "Default test name" };
// If ct == null here, we'll pass a null
// constructor to the factory and hope it can deal with it
}
result = objectFactory.newInstance(ct, parameters);
}
}
catch (TestNGException ex) {
throw ex;
// throw new TestNGException("Couldn't instantiate class:" + declaringClass);
}
catch (NoSuchMethodException ex) {
}
catch (Throwable cause) {
// Something else went wrong when running the constructor
throw new TestNGException("An error occurred while instantiating class "
+ declaringClass.getName() + ": " + cause.getMessage(), cause);
}
if (result == null) {
if (! Modifier.isPublic(declaringClass.getModifiers())) {
//result should not be null
throw new TestNGException("An error occurred while instantiating class "
+ declaringClass.getName() + ". Check to make sure it can be accessed/instantiated.");
// } else {
// Utils.log(ClassHelper.class.getName(), 2, "Couldn't instantiate class " + declaringClass);
}
}
return result;
}
/**
* Class.getEnclosingClass() only exists on JDK5, so reimplementing it
* here.
*/
private static Class<?> getEnclosingClass(Class<?> declaringClass) {
Class<?> result = null;
String className = declaringClass.getName();
int index = className.indexOf("$");
if (index != -1) {
String ecn = className.substring(0, index);
try {
result = Class.forName(ecn);
}
catch (ClassNotFoundException e) {
e.printStackTrace();
}
}
return result;
}
/**
* Find the best constructor given the parameters found on the annotation
*/
private static Constructor<?> findAnnotatedConstructor(IAnnotationFinder finder,
Class<?> declaringClass) {
Constructor<?>[] constructors = declaringClass.getDeclaredConstructors();
for (Constructor<?> result : constructors) {
IParametersAnnotation annotation = finder.findAnnotation(result, IParametersAnnotation.class);
if (null != annotation) {
String[] parameters = annotation.getValue();
Class<?>[] parameterTypes = result.getParameterTypes();
if (parameters.length != parameterTypes.length) {
throw new TestNGException("Parameter count mismatch: " + result + "\naccepts "
+ parameterTypes.length
+ " parameters but the @Test annotation declares "
+ parameters.length);
}
else {
return result;
}
}
}
return null;
}
public static <T> T tryOtherConstructor(Class<T> declaringClass) {
T result;
try {
// Special case for inner classes
if (declaringClass.getModifiers() == 0) {
return null;
}
Constructor<T> ctor = declaringClass.getConstructor(String.class);
result = ctor.newInstance("Default test name");
}
catch (Exception e) {
String message = e.getMessage();
if ((message == null) && (e.getCause() != null)) {
message = e.getCause().getMessage();
}
String error = "Could not create an instance of class " + declaringClass
+ ((message != null) ? (": " + message) : "")
+ ".\nPlease make sure it has a constructor that accepts either a String or no parameter.";
throw new TestNGException(error);
}
return result;
}
/**
* When given a file name to form a class name, the file name is parsed and divided
* into segments. For example, "c:/java/classes/com/foo/A.class" would be divided
* into 6 segments {"c:", "java", "classes", "com", "foo", "A"}. The first segment
* actually making up the class name is [3]. This value is saved in m_lastGoodRootIndex
* so that when we parse the next file name, we will try 3 right away. If 3 fails we
* will take the long approach. This is just an optimization cache value.
*/
private static int m_lastGoodRootIndex = -1;
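// Worked example (illustrative): after resolving "c:/java/classes/com/foo/A.class" the
// cached index is 3, so the next file name "c:/java/classes/com/foo/B.class" is first
// tried directly as "com.foo.B" before falling back to the long approach.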
/**
* Returns the Class object corresponding to the given name. The name may be
* of the following form:
* <ul>
* <li>A class name: "org.testng.TestNG"</li>
* <li>A class file name: "/testng/src/org/testng/TestNG.class"</li>
* <li>A class source name: "d:\testng\src\org\testng\TestNG.java"</li>
* </ul>
*
* @param file
* the class name.
* @return the class corresponding to the name specified.
*/
public static Class<?> fileToClass(String file) {
Class<?> result = null;
if(!file.endsWith(".class") && !file.endsWith(".java")) {
// Doesn't end in .java or .class, assume it's a class name
if (file.startsWith("class ")) {
file = file.substring("class ".length());
}
result = ClassHelper.forName(file);
if (null == result) {
throw new TestNGException("Cannot load class from file: " + file);
}
return result;
}
int classIndex = file.lastIndexOf(".class");
if (-1 == classIndex) {
classIndex = file.lastIndexOf(".java");
// if(-1 == classIndex) {
// result = ClassHelper.forName(file);
// if (null == result) {
// throw new TestNGException("Cannot load class from file: " + file);
// return result;
}
// Transforms the file name into a class name.
// Remove the ".class" or ".java" extension.
String shortFileName = file.substring(0, classIndex);
// Split file name into segments. For example "c:/java/classes/com/foo/A"
// becomes {"c:", "java", "classes", "com", "foo", "A"}
String[] segments = shortFileName.split("[/\\\\]", -1);
// Check if the last good root index works for this one. For example, if the previous
// name was "c:/java/classes/com/foo/A.class" then m_lastGoodRootIndex is 3 and we
// try to make a class name ignoring the first m_lastGoodRootIndex segments (3). This
// will succeed rapidly if the path is the same as the one from the previous name.
if (-1 != m_lastGoodRootIndex) {
// TODO use a StringBuffer here
String className = segments[m_lastGoodRootIndex];
for (int i = m_lastGoodRootIndex + 1; i < segments.length; i++) {
className += "." + segments[i];
}
result = ClassHelper.forName(className);
if (null != result) {
return result;
}
}
// We haven't found a good root yet, start by resolving the class from the end segment
// and work our way up. For example, if we start with "c:/java/classes/com/foo/A"
// we'll start by resolving "A", then "foo.A", then "com.foo.A" until something
// resolves. When it does, we remember the path we are at as "lastGoodRootIndex".
// TODO CQ use a StringBuffer here
String className = null;
for (int i = segments.length - 1; i >= 0; i--) {
if (null == className) {
className = segments[i];
}
else {
className = segments[i] + "." + className;
}
result = ClassHelper.forName(className);
if (null != result) {
m_lastGoodRootIndex = i;
break;
}
}
if (null == result) {
throw new TestNGException("Cannot load class from file: " + file);
}
return result;
}
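// Usage sketch (illustrative; the paths mirror the forms listed in the javadoc above
// and are hypothetical): all three calls resolve to the same class when it is on the
// classpath:
//
//   fileToClass("org.testng.TestNG");
//   fileToClass("/testng/src/org/testng/TestNG.class");
//   fileToClass("d:\\testng\\src\\org\\testng\\TestNG.java");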
}
|
package szewek.mcflux.util;
import com.rollbar.Rollbar;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import it.unimi.dsi.fastutil.longs.Long2ObjectMap;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
import net.minecraftforge.fml.common.Loader;
import net.minecraftforge.fml.common.ModContainer;
import szewek.mcflux.L;
import szewek.mcflux.R;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.GZIPOutputStream;
public enum MCFluxReport {
;
private static Rollbar rollbar = new Rollbar(R.MF_ACCESS_TOKEN, R.MF_ENVIRONMENT, null, R.MF_VERSION, null, null, null, null, null, null, null, new HashMap<>(), null, null, null, null);
private static final Int2ObjectMap<ErrMsg> errMsgs = new Int2ObjectOpenHashMap<>();
private static final Long2ObjectMap<Timer> timers = new Long2ObjectOpenHashMap<>();
private static final DateFormat fileDate = new SimpleDateFormat("yyyy-MM-dd_HH.mm.ss");
public static void init() {
rollbar.getCustom().putAll(Collections.singletonMap("Mods", Loader.instance().getIndexedModList().keySet()));
}
public static void handleErrors() {
Thread t = Thread.currentThread();
Thread.UncaughtExceptionHandler ueh = t.getUncaughtExceptionHandler();
t.setUncaughtExceptionHandler(new Uncaught(ueh));
}
public static void sendException(Throwable th, String n) {
rollbar.warning(th, n + ": " + th.getMessage());
}
public static void addErrMsg(ErrMsg em) {
int hc = em.hashCode();
em.sendInfo(rollbar);
if (errMsgs.containsKey(hc)) {
ErrMsg xem = errMsgs.get(hc);
xem.addThrowable(em.msgThrown);
xem.addUp();
} else {
errMsgs.put(hc, em);
em.addUp();
}
}
public static long measureTime(String s) {
long hc = ((long) s.hashCode() << 32) + Thread.currentThread().hashCode();
Timer tt;
if (timers.containsKey(hc)) {
tt = timers.get(hc);
} else {
tt = new Timer(s);
timers.put(hc, tt);
}
tt.start();
return hc;
}
public static void stopTimer(long hc) {
Timer tt = timers.get(hc);
if (tt != null)
tt.stop();
}
public static void makeReportFile(File dirf) throws IOException {
File f = new File(dirf, "mcflux-" + fileDate.format(new Date()) + ".log.gz");
PrintStream ps = new PrintStream(new GZIPOutputStream(new FileOutputStream(f)));
ps.println("== TIMER MEASURES");
for (Timer tt : timers.values()) {
ps.print("! " + tt.name + " [" + tt.thName + "]; ");
long lmin, lmax, ltot = 0;
double lavg;
long[] l = tt.getMeasures();
ps.print(l.length + " × ");
lmin = lmax = l[0];
for (int i = 0; i < l.length; i++) {
if (i > 0) {
if (l[i] < lmin)
lmin = l[i];
if (l[i] > lmax)
lmax = l[i];
}
ltot += l[i];
}
lavg = (double) ltot / l.length;
ps.println(ltot + " ns (avg. " + lavg + " ns; min/max " + lmin + '/' + lmax + " ns)");
}
ps.println("== END OF TIMER MEASURES");
timers.clear();
if (!errMsgs.isEmpty()) {
ps.println("== START OF ERROR MESSAGES");
for (ErrMsg em : errMsgs.values()) {
ps.println("+-- ErrMsg: " + em);
ps.println(em.makeInfo());
List<Throwable> lt = em.getThrowables();
if (lt.isEmpty())
ps.println("| No throwables found.");
else {
ps.println("| Throwables: " + lt.size());
for (Throwable th : lt) {
if (th == null) {
ps.println("A null throwable found.");
continue;
}
th.printStackTrace(ps);
ps.println();
}
}
ps.println("+
}
ps.println("== END OF ERROR MESSAGES");
errMsgs.clear();
} else
L.info("No errors found!");
ps.close();
}
public static void listAllConflictingMods() {
String[] mods = new String[] {
"energysynergy",
"commoncapabilities"
};
Map<String, ModContainer> modmap = Loader.instance().getIndexedModList();
List<String> sl = new ArrayList<>();
for (String m : mods) {
if (modmap.containsKey(m)) {
sl.add(modmap.get(m).getName());
}
}
if (!sl.isEmpty()) {
StringBuilder sb = new StringBuilder("There are mods that can cause a conflict with Minecraft-Flux: ");
boolean comma = false;
for (String s : sl) {
if (comma)
sb.append(", ");
sb.append(s);
comma = true;
}
L.warn(sb.toString());
}
}
static final class Uncaught implements Thread.UncaughtExceptionHandler {
private final Thread.UncaughtExceptionHandler ueh;
Uncaught(Thread.UncaughtExceptionHandler ueh) {
this.ueh = ueh;
}
@Override public void uncaughtException(Thread t, Throwable e) {
rollbar.error(e, "Uncaught Exception from [" + t.getName() + "]: " + e.getMessage());
if (ueh != null && !ueh.equals(this))
ueh.uncaughtException(t, e);
else
t.getThreadGroup().uncaughtException(t, e);
}
}
}
|
package techreborn.client.gui;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.GuiScreen;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.RenderHelper;
import net.minecraft.client.renderer.RenderItem;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.client.renderer.texture.TextureMap;
import net.minecraft.client.resources.I18n;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraft.util.text.TextFormatting;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fml.client.config.GuiUtils;
import net.minecraftforge.fml.common.Loader;
import reborncore.api.tile.IUpgradeable;
import reborncore.client.guibuilder.GuiBuilder;
import reborncore.common.powerSystem.PowerSystem;
import reborncore.common.powerSystem.TilePowerAcceptor;
import techreborn.init.ModItems;
import techreborn.lib.ModInfo;
import techreborn.proxies.ClientProxy;
import java.util.ArrayList;
import java.util.List;
import static net.minecraft.item.ItemStack.EMPTY;
public class TRBuilder extends GuiBuilder {
public static final ResourceLocation GUI_SHEET = new ResourceLocation(ModInfo.MOD_ID.toLowerCase() + ":" + "textures/gui/gui_sheet.png");
public TRBuilder() {
super(GUI_SHEET);
}
public void drawMultiEnergyBar(GuiBase gui, int x, int y, int energyStored, int maxEnergyStored, int mouseX, int mouseY, int buttonID, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
if (layer == GuiBase.Layer.FOREGROUND) {
mouseX -= gui.getGuiLeft();
mouseY -= gui.getGuiTop();
}
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, PowerSystem.getDisplayPower().xBar - 15, PowerSystem.getDisplayPower().yBar - 1, 14, 50);
int draw = (int) ((double) energyStored / (double) maxEnergyStored * (48));
if (energyStored > maxEnergyStored) {
draw = (int) ((double) maxEnergyStored / (double) maxEnergyStored * (48));
}
gui.drawTexturedModalRect(x + 1, y + 49 - draw, PowerSystem.getDisplayPower().xBar, 48 + PowerSystem.getDisplayPower().yBar - draw, 12, draw);
int percentage = percentage(maxEnergyStored, energyStored);
if (isInRect(x + 1, y + 1, 11, 48, mouseX, mouseY)) {
List<String> list = new ArrayList<>();
TextFormatting powerColour = TextFormatting.GOLD;
list.add(powerColour + PowerSystem.getLocaliszedPowerFormattedNoSuffix(energyStored) + "/" + PowerSystem.getLocaliszedPowerFormattedNoSuffix(maxEnergyStored) + " " + PowerSystem.getDisplayPower().abbreviation);
list.add(getPercentageColour(percentage) + "" + percentage + "%" + TextFormatting.GRAY + " Charged");
if(gui.tile instanceof TilePowerAcceptor && GuiScreen.isShiftKeyDown()){
((TilePowerAcceptor) gui.tile).addInfo(list, true);
list.add("");
list.add(TextFormatting.BLUE + "Click to change display unit");
} else {
list.add("");
list.add(TextFormatting.BLUE + "Shift" + TextFormatting.GRAY + " for more info");
}
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, -1, gui.mc.fontRenderer);
GlStateManager.disableLighting();
GlStateManager.color(1, 1, 1, 1);
}
gui.addPowerButton(x, y, buttonID, layer);
}
public void drawProgressBar(GuiBase gui, int progress, int maxProgress, int x, int y, int mouseX, int mouseY, ProgressDirection direction, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
if (layer == GuiBase.Layer.FOREGROUND) {
mouseX -= gui.getGuiLeft();
mouseY -= gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, direction.x, direction.y, direction.width, direction.height);
if (direction.equals(ProgressDirection.RIGHT)) {
int j = (int) ((double) progress / (double) maxProgress * 16);
if (j < 0)
j = 0;
gui.drawTexturedModalRect(x, y, direction.xActive, direction.yActive, j, 10);
}
if (direction.equals(ProgressDirection.LEFT)) {
int j = (int) ((double) progress / (double) maxProgress * 16);
if (j < 0)
j = 0;
gui.drawTexturedModalRect(x + 16 - j, y, direction.xActive + 16 - j, direction.yActive, j, 10);
}
if (direction.equals(ProgressDirection.UP)) {
int j = (int) ((double) progress / (double) maxProgress * 16);
if (j < 0)
j = 0;
gui.drawTexturedModalRect(x, y + 16 - j, direction.xActive, direction.yActive + 16 - j, 10, j);
}
if (isInRect(x, y, direction.width, direction.height, mouseX, mouseY)) {
int percentage = percentage(maxProgress, progress);
List<String> list = new ArrayList<>();
list.add(getPercentageColour(percentage) + "" + percentage + "%");
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, -1, gui.mc.fontRenderer);
GlStateManager.disableLighting();
GlStateManager.color(1, 1, 1, 1);
}
}
public void drawTank(GuiBase gui, int x, int y, int mouseX, int mouseY, FluidStack fluid, int maxCapacity, boolean isTankEmpty, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
if (layer == GuiBase.Layer.FOREGROUND) {
mouseX -= gui.getGuiLeft();
mouseY -= gui.getGuiTop();
}
int percentage = 0;
int amount = 0;
boolean empty = true;
if (!isTankEmpty) {
amount = fluid.amount;
percentage = percentage(maxCapacity, amount);
empty = false;
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, 228, 18, 22, 56);
if (!empty)
drawFluid(gui, fluid, x + 4, y + 4, 14, 48, maxCapacity);
gui.drawTexturedModalRect(x + 3, y + 3, 231, 74, 16, 50);
if (isInRect(x, y, 22, 56, mouseX, mouseY)) {
List<String> list = new ArrayList<>();
if (empty)
list.add(TextFormatting.GOLD + "Empty Tank");
else
list.add(TextFormatting.GOLD + "" + amount + "mB/" + maxCapacity + "mB " + fluid.getLocalizedName());
list.add(getPercentageColour(percentage) + "" + percentage + "%" + TextFormatting.GRAY + " Full");
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, -1, gui.mc.fontRenderer);
GlStateManager.disableLighting();
GlStateManager.color(1, 1, 1, 1);
}
}
public void drawFluid(GuiBase gui, FluidStack fluid, int x, int y, int width, int height, int maxCapacity) {
gui.mc.renderEngine.bindTexture(TextureMap.LOCATION_BLOCKS_TEXTURE);
y += height;
final ResourceLocation still = fluid.getFluid().getStill(fluid);
final TextureAtlasSprite sprite = gui.mc.getTextureMapBlocks().getAtlasSprite(still.toString());
final int drawHeight = (int) (fluid.amount / (maxCapacity * 1F) * height);
final int iconHeight = sprite.getIconHeight();
int offsetHeight = drawHeight;
int iteration = 0;
while (offsetHeight != 0) {
final int curHeight = offsetHeight < iconHeight ? offsetHeight : iconHeight;
gui.drawTexturedModalRect(x, y - offsetHeight, sprite, width, curHeight);
offsetHeight -= curHeight;
iteration++;
if (iteration > 50)
break;
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
}
public void drawJEIButton(GuiBase gui, int x, int y, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (Loader.isModLoaded("jei")) {
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, 184, 70, 20, 12);
}
}
public void drawLockButton(GuiBase gui, int x, int y, int mouseX, int mouseY, GuiBase.Layer layer, boolean locked) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, 204, 70 + (locked ? 12 : 0) , 20, 12);
if (isInRect(x, y, 20, 12, mouseX, mouseY)) {
List<String> list = new ArrayList<>();
if(locked){
list.add("Unlock items");
} else {
list.add("Lock Items");
}
GlStateManager.pushMatrix();
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, 80, gui.mc.fontRenderer);
GlStateManager.popMatrix();
}
}
public void drawHologramButton(GuiBase gui, int x, int y, int mouseX, int mouseY, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
if (layer == GuiBase.Layer.FOREGROUND) {
mouseX -= gui.getGuiLeft();
mouseY -= gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
if (ClientProxy.multiblockRenderEvent.currentMultiblock == null) {
gui.drawTexturedModalRect(x, y, 184, 94, 20, 12);
} else {
gui.drawTexturedModalRect(x, y, 184, 106, 20, 12);
}
if (isInRect(x, y, 20, 12, mouseX, mouseY)) {
List<String> list = new ArrayList<>();
list.add("Toggle Multiblock Hologram");
GlStateManager.pushMatrix();
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, -1, gui.mc.fontRenderer);
GlStateManager.popMatrix();
}
}
public void drawUpDownButtons(GuiBase gui, int x, int y, GuiBase.Layer layer){
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, 150, 70, 12, 12);
gui.drawTexturedModalRect(x + 12, y, 150, 82, 12, 12);
gui.drawTexturedModalRect(x + 24, y, 150, 94, 12, 12);
gui.drawTexturedModalRect(x + 36, y, 150, 106, 12, 12);
}
public void drawUpDownButtonsSmall(GuiBase gui, int x, int y, GuiBase.Layer layer){
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
//gui.drawTexturedModalRect(x, y, 150, 70, 12, 12);
gui.drawTexturedModalRect(x + 12, y, 150, 82, 12, 12);
gui.drawTexturedModalRect(x + 24, y, 150, 94, 12, 12);
//gui.drawTexturedModalRect(x + 36, y, 150, 106, 12, 12);
}
public void drawEnergyOutput(GuiBase gui, int right, int top, int maxOutput, GuiBase.Layer layer){
if(GuiBase.showSlotConfig){
return;
}
String text = PowerSystem.getLocaliszedPowerFormattedNoSuffix(maxOutput) + " "
+ PowerSystem.getDisplayPower().abbreviation + "/t";
int width = gui.mc.fontRenderer.getStringWidth(text);
gui.drawString(text, right - 17 - width, top + 5, 0, layer);
if (layer == GuiBase.Layer.BACKGROUND) {
right += gui.getGuiLeft();
top += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(right - 16, top, 162, 101, 16, 17);
}
public void drawBigBlueBar(GuiBase gui, int x, int y, int value, int max, int mouseX, int mouseY, String suffix, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
if (!suffix.equals("")) {
suffix = " " + suffix;
}
gui.drawTexturedModalRect(x, y, 0, 218, 114, 18);
int j = (int) ((double) value / (double) max * 106);
if (j < 0)
j = 0;
gui.drawTexturedModalRect(x + 4, y + 4, 0, 236, j, 10);
gui.drawCentredString(value + suffix, y + 5, 0xFFFFFF, layer);
if (isInRect(x, y, 114, 18, mouseX, mouseY)) {
int percentage = percentage(max, value);
List<String> list = new ArrayList<>();
list.add("" + TextFormatting.GOLD + value + "/" + max + suffix);
list.add(getPercentageColour(percentage) + "" + percentage + "%" + TextFormatting.GRAY + " Full");
if (value > max) {
list.add(TextFormatting.GRAY + "Yo this is storing more than it should be able to");
list.add(TextFormatting.GRAY + "prolly a bug");
list.add(TextFormatting.GRAY + "pls report and tell how tf you did this");
}
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, -1, gui.mc.fontRenderer);
GlStateManager.disableLighting();
GlStateManager.color(1, 1, 1, 1);
}
}
public void drawBigHeatBar(GuiBase gui, int x, int y, int value, int max, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, 0, 218, 114, 18);
if (value != 0) {
int j = (int) ((double) value / (double) max * 106);
if (j < 0)
j = 0;
gui.drawTexturedModalRect(x + 4, y + 4, 0, 246, j, 10);
gui.drawCentredString(value + " Heat", y + 5, 0xFFFFFF, layer);
}
}
public void drawMultiblockMissingBar(GuiBase gui, GuiBase.Layer layer) {
if(GuiBase.showSlotConfig){
return;
}
int x = 0;
int y = 4;
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
GlStateManager.disableLighting();
GlStateManager.disableDepth();
GlStateManager.colorMask(true, true, true, false);
GuiUtils.drawGradientRect(0, x, y, x + 176, y + 20, 0x000000, 0xC0000000);
GuiUtils.drawGradientRect(0, x, y + 20, x + 176, y + 20 + 48, 0xC0000000, 0xC0000000);
GuiUtils.drawGradientRect(0, x, y + 68, x + 176, y + 70 + 20, 0xC0000000, 0x00000000);
GlStateManager.colorMask(true, true, true, true);
GlStateManager.enableDepth();
gui.drawCentredString(I18n.format("techreborn.message.missingmultiblock"), 43, 0xFFFFFF, layer);
}
public void drawBigBlueBar(GuiBase gui, int x, int y, int value, int max, int mouseX, int mouseY, GuiBase.Layer layer) {
drawBigBlueBar(gui, x, y, value, max, mouseX, mouseY, "", layer);
}
public void drawSelectedStack(GuiBase gui, int x, int y) {
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x - 4, y - 4, 202, 44, 24, 24);
}
public void drawBurnBar(GuiBase gui, int progress, int maxProgress, int x, int y, int mouseX, int mouseY, GuiBase.Layer layer) {
if (layer == GuiBase.Layer.BACKGROUND) {
x += gui.getGuiLeft();
y += gui.getGuiTop();
}
if (layer == GuiBase.Layer.FOREGROUND) {
mouseX -= gui.getGuiLeft();
mouseY -= gui.getGuiTop();
}
gui.mc.getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(x, y, 171, 84, 13, 13);
int j = 13 - (int) ((double) progress / (double) maxProgress * 13);
if (j > 0) {
gui.drawTexturedModalRect(x, y + j, 171, 70 + j, 13, 13 - j);
}
if (isInRect(x, y, 12, 12, mouseX, mouseY)) {
int percentage = percentage(maxProgress, progress);
List<String> list = new ArrayList<>();
list.add(getPercentageColour(percentage) + "" + percentage + "%");
net.minecraftforge.fml.client.config.GuiUtils.drawHoveringText(list, mouseX, mouseY, gui.width, gui.height, -1, gui.mc.fontRenderer);
GlStateManager.disableLighting();
GlStateManager.color(1, 1, 1, 1);
}
}
@Override
public void drawSlot(GuiScreen gui, int posX, int posY) {
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(posX, posY, 150, 0, 18, 18);
}
public void drawUpgrades(GuiScreen gui, IUpgradeable upgradeable, int posX, int posY) {
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(posX - 27, posY + 4, 126, 151, 30, 87);
}
public void drawSlotTab(GuiScreen gui, int posX, int posY, int mouseX, int mouseY, boolean upgrades){
int offset = 0;
if(!upgrades){
offset = 80;
}
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(posX - 26, posY + 84 - offset, 157, 148, 30, 30);
renderItemStack(new ItemStack(ModItems.WRENCH), posX - 19, posY + 92 - offset);
}
public void renderItemStack(ItemStack stack, int x, int y) {
if (stack != EMPTY) {
GlStateManager.enableBlend();
GlStateManager.blendFunc(GlStateManager.SourceFactor.SRC_ALPHA, GlStateManager.DestFactor.ONE_MINUS_SRC_ALPHA);
RenderHelper.enableGUIStandardItemLighting();
RenderItem itemRenderer = Minecraft.getMinecraft().getRenderItem();
itemRenderer.renderItemAndEffectIntoGUI(stack, x, y);
GlStateManager.disableLighting();
}
}
public void drawScrapSlot(GuiScreen gui, int posX, int posY) {
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(posX, posY, 150, 0, 18, 18);
}
public void drawOutputSlotBar(GuiScreen gui, int posX, int posY, int count) {
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
for (int i = 1; i <= count; i++) {
if (i == 1) {
gui.drawTexturedModalRect(posX, posY, 125 + 39, 218, 22, 26);
posX += 22;
if (1 == count) {
gui.drawTexturedModalRect(posX, posY, 147 + 39, 218, 4, 26);
}
} else if (i != 1 && i != count) {
gui.drawTexturedModalRect(posX, posY, 127 + 39, 218, 20, 26);
posX += 20;
} else if (i == count) {
gui.drawTexturedModalRect(posX, posY, 127 + 39, 218, 24, 26);
posX += 24;
}
}
}
@Override
public void drawOutputSlot(GuiScreen gui, int posX, int posY) {
Minecraft.getMinecraft().getTextureManager().bindTexture(GUI_SHEET);
gui.drawTexturedModalRect(posX, posY, 150, 18, 26, 26);
}
public int getScaledBurnTime(int scale, int burnTime, int totalBurnTime) {
return (int) (((float) burnTime / (float) totalBurnTime) * scale);
}
public TextFormatting getPercentageColour(int percentage) {
if (percentage <= 10) {
return TextFormatting.RED;
} else if (percentage >= 75) {
return TextFormatting.GREEN;
} else {
return TextFormatting.YELLOW;
}
}
public int percentage(int maxValue, int currentValue) {
if (currentValue == 0)
return 0;
return (int) ((currentValue * 100.0f) / maxValue);
}
public enum ProgressDirection {
RIGHT(84, 151, 100, 151, 16, 10), LEFT(100, 161, 84, 161, 16, 10), DOWN(104, 171, 114, 171, 10, 16), UP(84, 171, 94, 171, 10, 16);
public int x;
public int y;
public int xActive;
public int yActive;
public int width;
public int height;
ProgressDirection(int x, int y, int xActive, int yActive, int width, int height) {
this.x = x;
this.y = y;
this.xActive = xActive;
this.yActive = yActive;
this.width = width;
this.height = height;
}
}
}
|
package techreborn.events;
import cpw.mods.fml.common.eventhandler.EventPriority;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.common.gameevent.TickEvent;
import cpw.mods.fml.relauncher.Side;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import techreborn.init.ModItems;
public class TRTickHandler extends TickEvent {
public TRTickHandler(Type type, Side side, Phase phase) {
super(type, side, phase);
}
public Item previouslyWearing;
@SubscribeEvent(priority=EventPriority.NORMAL, receiveCanceled=true)
public void onPlayerTick(TickEvent.PlayerTickEvent e) {
EntityPlayer player = e.player;
Item chestslot = player.getEquipmentInSlot(3) != null ? player.getEquipmentInSlot(3).getItem() : null;
if(previouslyWearing != chestslot && previouslyWearing == ModItems.cloakingDevice && player.isInvisible())
player.setInvisible(false);
previouslyWearing = chestslot;
}
}
|
package tutorial.storm.trident;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.generated.StormTopology;
import backtype.storm.spout.SchemeAsMultiScheme;
import backtype.storm.tuple.Fields;
import com.google.common.collect.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import storm.kafka.KafkaConfig;
import storm.kafka.StringScheme;
import storm.kafka.trident.TransactionalTridentKafkaSpout;
import storm.kafka.trident.TridentKafkaConfig;
import storm.trident.TridentTopology;
import tutorial.storm.trident.operations.Print;
import tutorial.storm.trident.testutil.TweetIngestor;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import static com.google.common.base.Preconditions.checkArgument;
/**
* @author Enno Shioji (enno.shioji@peerindex.com)
*/
public class Skeleton {
private static final Logger log = LoggerFactory.getLogger(Skeleton.class);
public static StormTopology buildTopology(LocalDRPC drpc, TransactionalTridentKafkaSpout spout) throws IOException {
TridentTopology topology = new TridentTopology();
topology
.newStream("tweets", spout)
.each(new Fields("str"), new Print())
;
topology
.newDRPCStream("fake", drpc)
.each(new Fields("args"), new Print())
;
return topology.build();
}
public static void main(String[] args) throws Exception {
Config conf = new Config();
LocalDRPC drpc = new LocalDRPC();
LocalCluster cluster = new LocalCluster();
String testKafkaBrokerHost;
if (args.length == 0) {
final String tmpFile = "/tmp/"+UUID.randomUUID();
new File(tmpFile).deleteOnExit();
TweetIngestor ingestor = new TweetIngestor(tmpFile, "test", 12000);
ingestor.startAndWait();
testKafkaBrokerHost = "127.0.0.1:12000";
}else{
checkArgument(args.length == 1);
testKafkaBrokerHost = args[0];
}
TransactionalTridentKafkaSpout tweetSpout = tweetSpout(testKafkaBrokerHost);
cluster.submitTopology("hackaton", conf, buildTopology(drpc, tweetSpout));
while (!Thread.currentThread().isInterrupted()) {
Thread.sleep(3000);
System.out.println(drpc.execute("fake", "test"));
}
}
private static TransactionalTridentKafkaSpout tweetSpout(String testKafkaBrokerHost) {
KafkaConfig.BrokerHosts hosts = TridentKafkaConfig.StaticHosts.fromHostString(ImmutableList.of(testKafkaBrokerHost), 1);
TridentKafkaConfig config = new TridentKafkaConfig(hosts, "test");
config.scheme = new SchemeAsMultiScheme(new StringScheme());
return new TransactionalTridentKafkaSpout(config);
}
}
|
package model.ability_management;
/**
* A collection of all possible abilities
* Enumerated with a string for parsing and view purposes
*/
public enum AbilityEnum {
MOVE_NORTH ("Move North"),
MOVE_NORTHLEFT ("Move North Left"),
MOVE_NORTHRIGHT ("Move North Right"),
MOVE_SOUTH ("Move South"),
MOVE_SOUTHLEFT ("Move South Left"),
MOVE_SOUTHRIGHT ("Move South Right"),
TRADE_TRANSPORT ("Trade with Transport"),
TRADE_TILE ("Trade with the Tile"),
BUILD_WAGON ("Build A Wagon"),
BUILD_TRUCK ("Build A Truck"),
BUILD_RAFT ("Build A Raft"),
BUILD_ROWBOAT ("Build A Rowboat"),
BUILD_STEAMBOAT ("Build A Steamboat"),
SCUTTLE_TRANSPORTER ("Destroy This Transporter"),
BREED_DONKEY ("Breed A Donkey"),
RESEARCH ("Perform Research"),
UPGRADE_WAGON_FACTORY ("Upgrade A Wagon Factory"),
UPGRADE_RAFT_FACTORY ("Upgrade A Raft Factory"),
UPGRADE_ROWBOAT_FACTORY ("Upgrade A Rowboat Factory"),
INPUT_RESOURCES ("Input Resources To A Secondary Producer"),
DROP_RESOURCES ("Drop Off Resources On Tile"),
PICK_UP_RESOURCES ("Pick Up Resources From Tile"),
PICK_UP_TRANSPORTER ("Pick Up Transporter From Tile"),
BUILD_CLAY_PIT ("Build A Clay Pit"),
BUILD_QUARRY ("Build A Quarry"),
BUILD_MINE ("Build A Mine"),
BUILD_OIL_RIG ("Build An Oil Rig"),
BUILD_SAWMILL ("Build A Sawmill"),
BUILD_MINT ("Build A Mint"),
BUILD_STOCK_MARKET ("Build A Stock Market"),
BUILD_WAGON_FACTORY ("Build A Wagon Factory"),
BUILD_TRUCK_FACTORY ("Build A Truck Factory"),
BUILD_RAFT_FACTORY ("Build A Raft Factory"),
BUILD_ROWBOAT_FACTORY ("Build A Rowboat Factory"),
BUILD_STEAMBOAT_FACTORY ("Build A Steamboat Factory"),
BUILD_WALL ("Make America Great Again"),
DESTROY_WALL ("Destroy A Wall"),
STRENGTHEN_WALL ("Strengthen A Wall"),
BUILD_MINE_SHAFT ("Build A Mine Shaft"),
BUILD_ROAD ("Build A Road"),
BUILD_BRIDGE ("Build A Bridge"),
BUILD_WONDER_BRICK ("Build A Wonder Brick");
// GUI description for each ability
private final String description;
// Constructor
AbilityEnum(final String s) {
this.description = s;
}
// Return the ability description
public String getDescription() { return this.description; }
}
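// Illustrative usage sketch (not part of the original source): iterate the enum and print each
// ability together with the GUI description it carries, using only the accessor defined above.
class AbilityEnumExample {
    public static void main(String[] args) {
        for (AbilityEnum ability : AbilityEnum.values()) {
            System.out.println(ability.name() + " -> " + ability.getDescription());
        }
    }
}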
|
// Copyright (C) United States Government as represented by the
// Administrator of the National Aeronautics and Space Administration
// (NASA). All Rights Reserved.
// This software is distributed under the NASA Open Source Agreement
// (NOSA), version 1.3. The NOSA has been approved by the Open Source
// Initiative. See the file NOSA-1.3-JPF at the top of the distribution
// directory tree for the complete NOSA document.
// THE SUBJECT SOFTWARE IS PROVIDED "AS IS" WITHOUT ANY WARRANTY OF ANY
// KIND, EITHER EXPRESSED, IMPLIED, OR STATUTORY, INCLUDING, BUT NOT
// LIMITED TO, ANY WARRANTY THAT THE SUBJECT SOFTWARE WILL CONFORM TO
// SPECIFICATIONS, ANY IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR
// A PARTICULAR PURPOSE, OR FREEDOM FROM INFRINGEMENT, ANY WARRANTY THAT
// THE SUBJECT SOFTWARE WILL BE ERROR FREE, OR ANY WARRANTY THAT
// DOCUMENTATION, IF PROVIDED, WILL CONFORM TO THE SUBJECT SOFTWARE.
package gov.nasa.jpf.listener;
import gov.nasa.jpf.Config;
import gov.nasa.jpf.JPFConfigException;
import gov.nasa.jpf.PropertyListenerAdapter;
import gov.nasa.jpf.jvm.FieldInfo;
import gov.nasa.jpf.jvm.JVM;
import gov.nasa.jpf.jvm.LocalVarInfo;
import gov.nasa.jpf.jvm.MethodInfo;
import gov.nasa.jpf.jvm.StackFrame;
import gov.nasa.jpf.jvm.ThreadInfo;
import gov.nasa.jpf.jvm.bytecode.DSTORE;
import gov.nasa.jpf.jvm.bytecode.FSTORE;
import gov.nasa.jpf.jvm.bytecode.FieldInstruction;
import gov.nasa.jpf.jvm.bytecode.ISTORE;
import gov.nasa.jpf.jvm.bytecode.PUTFIELD;
import gov.nasa.jpf.jvm.bytecode.PUTSTATIC;
import gov.nasa.jpf.jvm.bytecode.InstructionVisitorAdapter;
import gov.nasa.jpf.jvm.bytecode.LSTORE;
import gov.nasa.jpf.jvm.bytecode.LocalVariableInstruction;
import gov.nasa.jpf.search.Search;
import gov.nasa.jpf.util.FieldSpec;
import gov.nasa.jpf.util.VarSpec;
/**
* little listener that checks value ranges of specified numeric fields and local vars
*
* configuration examples:
*
* range.fields=speed,..
* range.speed.field=x.y.SomeClass.velocity
* range.speed.min=300
* range.speed.max=500
*
* range.vars=altitude,..
* range.altitude.var=x.y.SomeClass.computeTrajectory(int):a
* range.altitude.min=125000
*
*/
public class NumericValueChecker extends PropertyListenerAdapter {
static abstract class RangeCheck {
double min, max;
RangeCheck (double min, double max){
this.min = min;
this.max = max;
}
String check (long v){
if (v < (long)min){
return String.format("%d < %d", v, (long)min);
} else if (v > (long)max){
return String.format("%d > %d", v, (long)max);
}
return null;
}
String check (double v){
if (v < min){
return String.format("%f < %f", v, min);
} else if (v > max){
return String.format("%f > %f", v, max);
}
return null;
}
}
static class FieldCheck extends RangeCheck {
FieldSpec fspec;
FieldCheck (FieldSpec fspec, double min, double max){
super(min,max);
this.fspec = fspec;
}
boolean matches (FieldInfo fi){
return fspec.matches(fi);
}
}
static class VarCheck extends RangeCheck {
VarSpec vspec;
VarCheck (VarSpec vspec, double min, double max){
super(min,max);
this.vspec = vspec;
}
LocalVarInfo getMatch (MethodInfo mi, int pc, int slotIdx){
return vspec.getMatchingLocalVarInfo(mi, pc, slotIdx);
}
}
class Visitor extends InstructionVisitorAdapter {
void checkFieldInsn (FieldInstruction insn){
if (fieldChecks != null){
FieldInfo fi = insn.getFieldInfo();
for (int i = 0; i < fieldChecks.length; i++) {
FieldCheck fc = fieldChecks[i];
if (fc.matches(fi)) {
if (fi.isNumericField()) {
long lv = insn.getLastValue();
String errorCond = fi.isFloatingPointField()
? fc.check(Double.longBitsToDouble(lv)) : fc.check(lv);
if (errorCond != null) {
error = String.format("field %s out of range: %s\n\t at %s",
fi.getFullName(), errorCond, insn.getSourceLocation());
vm.breakTransition(); // terminate this transition
break;
}
}
}
}
}
}
void checkVarInsn (LocalVariableInstruction insn){
if (varChecks != null){
ThreadInfo ti = vm.getLastThreadInfo();
StackFrame frame = ti.getTopFrame();
int slotIdx = insn.getLocalVariableIndex();
for (int i = 0; i < varChecks.length; i++) {
VarCheck vc = varChecks[i];
MethodInfo mi = insn.getMethodInfo();
int pc = insn.getPosition()+1; // the scope would begin on the next insn, we are still at the xSTORE
LocalVarInfo lvar = vc.getMatch(mi, pc, slotIdx);
if (lvar != null) {
long v = lvar.getSlotSize() == 1 ? frame.getLocalVariable(slotIdx) : frame.getLongLocalVariable(slotIdx);
String errorCond = lvar.isFloatingPoint()
? vc.check(Double.longBitsToDouble(v)) : vc.check(v);
if (errorCond != null) {
error = String.format("local variable %s out of range: %s\n\t at %s",
lvar.getName(), errorCond, insn.getSourceLocation());
vm.breakTransition(); // terminate this transition
break;
}
}
}
}
}
@Override
public void visit(PUTFIELD insn){
checkFieldInsn(insn);
}
@Override
public void visit(PUTSTATIC insn){
checkFieldInsn(insn);
}
@Override
public void visit(ISTORE insn){
checkVarInsn(insn);
}
@Override
public void visit(LSTORE insn){
checkVarInsn(insn);
}
@Override
public void visit(FSTORE insn){
checkVarInsn(insn);
}
@Override
public void visit(DSTORE insn){
checkVarInsn(insn);
}
}
JVM vm;
Visitor visitor;
// the stuff we monitor
FieldCheck[] fieldChecks;
VarCheck[] varChecks;
String error; // where we store errorCond details
public NumericValueChecker (Config conf){
visitor = new Visitor();
createFieldChecks(conf);
createVarChecks(conf);
}
private void createFieldChecks(Config conf){
String[] checkIds = conf.getCompactTrimmedStringArray("range.fields");
if (checkIds.length > 0){
fieldChecks = new FieldCheck[checkIds.length];
for (int i = 0; i < checkIds.length; i++) {
String id = checkIds[i];
FieldCheck check = null;
String keyPrefix = "range." + id;
String spec = conf.getString(keyPrefix + ".field");
if (spec != null) {
FieldSpec fs = FieldSpec.createFieldSpec(spec);
if (fs != null) {
double min = conf.getDouble(keyPrefix + ".min", Double.MIN_VALUE);
double max = conf.getDouble(keyPrefix + ".max", Double.MAX_VALUE);
check = new FieldCheck(fs, min, max);
}
}
if (check == null) {
throw new JPFConfigException("illegal field range check specification for " + id);
}
fieldChecks[i] = check;
}
}
}
private void createVarChecks(Config conf){
String[] checkIds = conf.getCompactTrimmedStringArray("range.vars");
if (checkIds.length > 0){
varChecks = new VarCheck[checkIds.length];
for (int i = 0; i < checkIds.length; i++) {
String id = checkIds[i];
VarCheck check = null;
String keyPrefix = "range." + id;
String spec = conf.getString(keyPrefix + ".var");
if (spec != null) {
VarSpec vs = VarSpec.createVarSpec(spec);
if (vs != null) {
double min = conf.getDouble(keyPrefix + ".min", Double.MIN_VALUE);
double max = conf.getDouble(keyPrefix + ".max", Double.MAX_VALUE);
check = new VarCheck(vs, min, max);
}
}
if (check == null) {
throw new JPFConfigException("illegal variable range check specification for " + id);
}
varChecks[i] = check;
}
}
}
@Override
public void instructionExecuted (JVM vm){
this.vm = vm;
vm.getLastInstruction().accept(visitor);
}
@Override
public boolean check(Search search, JVM vm) {
return (error == null);
}
@Override
public void reset () {
error = null;
}
@Override
public String getErrorMessage(){
return error;
}
}
|
package ai.elimu.rest.v1;
import java.util.ArrayList;
import java.util.List;
import ai.elimu.model.admin.Application;
import ai.elimu.model.Device;
import ai.elimu.model.content.Number;
import ai.elimu.model.content.Word;
import ai.elimu.model.admin.ApplicationVersion;
import ai.elimu.model.content.Allophone;
import ai.elimu.model.content.Letter;
import ai.elimu.model.content.StoryBook;
import ai.elimu.model.content.StoryBookChapter;
import ai.elimu.model.content.Syllable;
import ai.elimu.model.content.multimedia.Audio;
import ai.elimu.model.content.multimedia.Image;
import ai.elimu.model.content.multimedia.Video;
import ai.elimu.model.gson.DeviceGson;
import ai.elimu.model.gson.content.NumberGson;
import ai.elimu.model.gson.content.WordGson;
import ai.elimu.model.gson.admin.ApplicationGson;
import ai.elimu.model.gson.admin.ApplicationVersionGson;
import ai.elimu.model.gson.content.AllophoneGson;
import ai.elimu.model.gson.content.LetterGson;
import ai.elimu.model.gson.content.StoryBookChapterGson;
import ai.elimu.model.gson.content.StoryBookGson;
import ai.elimu.model.gson.content.SyllableGson;
import ai.elimu.model.gson.content.multimedia.AudioGson;
import ai.elimu.model.gson.content.multimedia.ImageGson;
import ai.elimu.model.gson.content.multimedia.VideoGson;
/**
* Convert classes from JPA/Hibernate format to POJO format, so that they can be serialized into
* JSON and transferred to Android applications that connect via the REST API.
*/
public class JavaToGsonConverter {
public static AudioGson getAudioGson(Audio audio) {
if (audio == null) {
return null;
} else {
AudioGson audioGson = new AudioGson();
audioGson.setId(audio.getId());
audioGson.setLanguage(audio.getLanguage());
audioGson.setTimeLastUpdate(audio.getTimeLastUpdate());
audioGson.setRevisionNumber(audio.getRevisionNumber());
audioGson.setContentStatus(audio.getContentStatus());
audioGson.setDownloadUrl("/audio/" + audio.getId() + "." + audio.getAudioFormat().toString().toLowerCase());
audioGson.setDownloadSize(audio.getBytes().length / 1024);
audioGson.setContentType(audio.getContentType());
audioGson.setLiteracySkills(audio.getLiteracySkills());
audioGson.setNumeracySkills(audio.getNumeracySkills());
List<LetterGson> letters = new ArrayList<>();
for (Letter letter : audio.getLetters()) {
LetterGson letterGson = getLetterGson(letter);
letters.add(letterGson);
}
audioGson.setLetters(letters);
List<NumberGson> numbers = new ArrayList<>();
for (Number number : audio.getNumbers()) {
NumberGson numberGson = getNumberGson(number);
numbers.add(numberGson);
}
audioGson.setNumbers(numbers);
List<WordGson> words = new ArrayList<>();
for (Word word : audio.getWords()) {
WordGson wordGson = getWordGson(word);
words.add(wordGson);
}
audioGson.setWords(words);
audioGson.setTranscription(audio.getTranscription());
audioGson.setAudioType(audio.getAudioFormat());
return audioGson;
}
}
public static AllophoneGson getAllophoneGson(Allophone allophone) {
if (allophone == null) {
return null;
} else {
AllophoneGson allophoneGson = new AllophoneGson();
allophoneGson.setId(allophone.getId());
allophoneGson.setLanguage(allophone.getLanguage());
allophoneGson.setTimeLastUpdate(allophone.getTimeLastUpdate());
allophoneGson.setRevisionNumber(allophone.getRevisionNumber());
allophoneGson.setContentStatus(allophone.getContentStatus());
allophoneGson.setValueIpa(allophone.getValueIpa());
allophoneGson.setValueSampa(allophone.getValueSampa());
allophoneGson.setAudio(getAudioGson(allophone.getAudio()));
allophoneGson.setDiacritic(allophone.isDiacritic());
allophoneGson.setSoundType(allophone.getSoundType());
allophoneGson.setUsageCount(allophone.getUsageCount());
return allophoneGson;
}
}
public static ApplicationGson getApplicationGson(Application application) {
if (application == null) {
return null;
} else {
ApplicationGson applicationGson = new ApplicationGson();
applicationGson.setId(application.getId());
applicationGson.setLanguage(application.getLanguage());
applicationGson.setPackageName(application.getPackageName());
applicationGson.setInfrastructural(application.isInfrastructural());
applicationGson.setLiteracySkills(application.getLiteracySkills());
applicationGson.setNumeracySkills(application.getNumeracySkills());
applicationGson.setApplicationStatus(application.getApplicationStatus());
return applicationGson;
}
}
public static ApplicationVersionGson getApplicationVersionGson(ApplicationVersion applicationVersion) {
if (applicationVersion == null) {
return null;
} else {
ApplicationVersionGson applicationVersionGson = new ApplicationVersionGson();
applicationVersionGson.setId(applicationVersion.getId());
applicationVersionGson.setApplication(getApplicationGson(applicationVersion.getApplication()));
applicationVersionGson.setFileSizeInKb(applicationVersion.getFileSizeInKb());
applicationVersionGson.setFileUrl("/apk/" + applicationVersion.getApplication().getPackageName() + "-" + applicationVersion.getVersionCode() + ".apk");
applicationVersionGson.setChecksumMd5(applicationVersion.getChecksumMd5());
applicationVersionGson.setContentType(applicationVersion.getContentType());
applicationVersionGson.setVersionCode(applicationVersion.getVersionCode());
applicationVersionGson.setVersionName(applicationVersion.getVersionName());
applicationVersionGson.setLabel(applicationVersion.getLabel());
applicationVersionGson.setMinSdkVersion(applicationVersion.getMinSdkVersion());
applicationVersionGson.setTimeUploaded(applicationVersion.getTimeUploaded());
return applicationVersionGson;
}
}
public static DeviceGson getDeviceGson(Device device) {
if (device == null) {
return null;
} else {
DeviceGson deviceGson = new DeviceGson();
deviceGson.setId(device.getId());
deviceGson.setDeviceId(device.getDeviceId());
return deviceGson;
}
}
public static ImageGson getImageGson(Image image) {
if (image == null) {
return null;
} else {
ImageGson imageGson = new ImageGson();
imageGson.setId(image.getId());
imageGson.setLanguage(image.getLanguage());
imageGson.setTimeLastUpdate(image.getTimeLastUpdate());
imageGson.setRevisionNumber(image.getRevisionNumber());
imageGson.setContentStatus(image.getContentStatus());
imageGson.setDownloadUrl("/image/" + image.getId() + "." + image.getImageFormat().toString().toLowerCase());
imageGson.setDownloadSize(image.getBytes().length / 1024);
imageGson.setContentType(image.getContentType());
imageGson.setLiteracySkills(image.getLiteracySkills());
imageGson.setNumeracySkills(image.getNumeracySkills());
List<LetterGson> letters = new ArrayList<>();
for (Letter letter : image.getLetters()) {
LetterGson letterGson = getLetterGson(letter);
letters.add(letterGson);
}
imageGson.setLetters(letters);
List<NumberGson> numbers = new ArrayList<>();
for (Number number : image.getNumbers()) {
NumberGson numberGson = getNumberGson(number);
numbers.add(numberGson);
}
imageGson.setNumbers(numbers);
List<WordGson> words = new ArrayList<>();
for (Word word : image.getWords()) {
WordGson wordGson = getWordGson(word);
words.add(wordGson);
}
imageGson.setWords(words);
imageGson.setTitle(image.getTitle());
imageGson.setImageFormat(image.getImageFormat());
imageGson.setDominantColor(image.getDominantColor());
return imageGson;
}
}
public static LetterGson getLetterGson(Letter letter) {
if (letter == null) {
return null;
} else {
LetterGson letterGson = new LetterGson();
letterGson.setId(letter.getId());
letterGson.setLanguage(letter.getLanguage());
letterGson.setTimeLastUpdate(letter.getTimeLastUpdate());
letterGson.setRevisionNumber(letter.getRevisionNumber());
letterGson.setContentStatus(letter.getContentStatus());
letterGson.setText(letter.getText());
List<AllophoneGson> allophones = new ArrayList<>();
for (Allophone allophone : letter.getAllophones()) {
AllophoneGson allophoneGson = getAllophoneGson(allophone);
allophones.add(allophoneGson);
}
letterGson.setAllophones(allophones);
letterGson.setUsageCount(letter.getUsageCount());
return letterGson;
}
}
public static NumberGson getNumberGson(Number number) {
if (number == null) {
return null;
} else {
NumberGson numberGson = new NumberGson();
numberGson.setId(number.getId());
numberGson.setLanguage(number.getLanguage());
numberGson.setTimeLastUpdate(number.getTimeLastUpdate());
numberGson.setRevisionNumber(number.getRevisionNumber());
numberGson.setContentStatus(number.getContentStatus());
numberGson.setValue(number.getValue());
numberGson.setSymbol(number.getSymbol());
numberGson.setWord(getWordGson(number.getWord()));
List<WordGson> words = new ArrayList<>();
for (Word word : number.getWords()) {
WordGson wordGson = getWordGson(word);
words.add(wordGson);
}
numberGson.setWords(words);
return numberGson;
}
}
public static SyllableGson getSyllableGson(Syllable syllable) {
if (syllable == null) {
return null;
} else {
SyllableGson syllableGson = new SyllableGson();
syllableGson.setId(syllable.getId());
syllableGson.setLanguage(syllable.getLanguage());
syllableGson.setTimeLastUpdate(syllable.getTimeLastUpdate());
syllableGson.setRevisionNumber(syllable.getRevisionNumber());
syllableGson.setContentStatus(syllable.getContentStatus());
syllableGson.setText(syllable.getText());
List<AllophoneGson> allophones = new ArrayList<>();
for (Allophone allophone : syllable.getAllophones()) {
AllophoneGson allophoneGson = getAllophoneGson(allophone);
allophones.add(allophoneGson);
}
syllableGson.setAllophones(allophones);
syllableGson.setUsageCount(syllable.getUsageCount());
return syllableGson;
}
}
public static WordGson getWordGson(Word word) {
if (word == null) {
return null;
} else {
WordGson wordGson = new WordGson();
wordGson.setId(word.getId());
wordGson.setLanguage(word.getLanguage());
wordGson.setTimeLastUpdate(word.getTimeLastUpdate());
wordGson.setRevisionNumber(word.getRevisionNumber());
wordGson.setContentStatus(word.getContentStatus());
wordGson.setText(word.getText());
// TODO: setLetters
List<AllophoneGson> allophones = new ArrayList<>();
for (Allophone allophone : word.getAllophones()) {
AllophoneGson allophoneGson = getAllophoneGson(allophone);
allophones.add(allophoneGson);
}
wordGson.setAllophones(allophones);
wordGson.setUsageCount(word.getUsageCount());
wordGson.setWordType(word.getWordType());
wordGson.setSpellingConsistency(word.getSpellingConsistency());
return wordGson;
}
}
public static StoryBookGson getStoryBookGson(StoryBook storyBook) {
if (storyBook == null) {
return null;
} else {
StoryBookGson storyBookGson = new StoryBookGson();
storyBookGson.setId(storyBook.getId());
storyBookGson.setLanguage(storyBook.getLanguage());
storyBookGson.setTimeLastUpdate(storyBook.getTimeLastUpdate());
storyBookGson.setRevisionNumber(storyBook.getRevisionNumber());
storyBookGson.setContentStatus(storyBook.getContentStatus());
storyBookGson.setTitle(storyBook.getTitle());
storyBookGson.setDescription(storyBook.getDescription());
storyBookGson.setCoverImage(getImageGson(storyBook.getCoverImage()));
storyBookGson.setGradeLevel(storyBook.getGradeLevel());
// TODO: setStoryBookChapters
return storyBookGson;
}
}
public static StoryBookChapterGson getStoryBookChapter(StoryBookChapter storyBookChapter) {
if (storyBookChapter == null) {
return null;
} else {
StoryBookChapterGson storyBookChapterGson = new StoryBookChapterGson();
storyBookChapterGson.setId(storyBookChapter.getId());
storyBookChapterGson.setStoryBook(getStoryBookGson(storyBookChapter.getStoryBook()));
storyBookChapterGson.setSortOrder(storyBookChapter.getSortOrder());
// TODO: setStoryBookParagraphs
return storyBookChapterGson;
}
}
public static VideoGson getVideoGson(Video video) {
if (video == null) {
return null;
} else {
VideoGson videoGson = new VideoGson();
videoGson.setId(video.getId());
videoGson.setLanguage(video.getLanguage());
videoGson.setTimeLastUpdate(video.getTimeLastUpdate());
videoGson.setRevisionNumber(video.getRevisionNumber());
videoGson.setContentStatus(video.getContentStatus());
videoGson.setDownloadUrl("/video/" + video.getId() + "." + video.getVideoFormat().toString().toLowerCase());
videoGson.setDownloadSize(video.getBytes().length / 1024);
videoGson.setContentType(video.getContentType());
videoGson.setLiteracySkills(video.getLiteracySkills());
videoGson.setNumeracySkills(video.getNumeracySkills());
List<LetterGson> letters = new ArrayList<>();
for (Letter letter : video.getLetters()) {
LetterGson letterGson = getLetterGson(letter);
letters.add(letterGson);
}
videoGson.setLetters(letters);
List<NumberGson> numbers = new ArrayList<>();
for (Number number : video.getNumbers()) {
NumberGson numberGson = getNumberGson(number);
numbers.add(numberGson);
}
videoGson.setNumbers(numbers);
List<WordGson> words = new ArrayList<>();
for (Word word : video.getWords()) {
WordGson wordGson = getWordGson(word);
words.add(wordGson);
}
videoGson.setWords(words);
videoGson.setTitle(video.getTitle());
videoGson.setVideoFormat(video.getVideoFormat());
videoGson.setThumbnailDownloadUrl("/video/" + video.getId() + "/thumbnail.png");
return videoGson;
}
}
}
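// Illustrative usage sketch (not part of the original converter): turn a JPA Word entity into JSON
// by converting it to its Gson POJO first. The Word instance is assumed to come from a DAO or
// repository that is not shown here; only JavaToGsonConverter and Gson are taken from the source.
class JavaToGsonConverterExample {
    static String wordToJson(Word word) {
        WordGson wordGson = JavaToGsonConverter.getWordGson(word);
        return new com.google.gson.Gson().toJson(wordGson);
    }
}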
|
package ameba.security.shiro.util;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import java.net.URI;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @author icode
*/
public class URIMatcher {
protected static final Pattern URI_REGEX = Pattern.compile("\\{(.*?)\\}");
protected static final String WILDCARD_TOKEN = "*";
private Set<String> methods = Sets.newHashSet();
private String uri;
private Pattern uriPattern;
private boolean uriRegex = false;
public URIMatcher(String uriWithM) {
String[] um = uriWithM.trim().split(":");
init(um[0], um.length > 1 ? um[1].split("\\s+") : new String[]{WILDCARD_TOKEN});
}
public URIMatcher(String uri, String... m) {
init(uri, m);
}
protected void init(String uri, String[] m) {
this.uri = uri.trim();
if (m != null && m.length > 0) {
for (String mt : m) {
if (StringUtils.isNotBlank(mt)) {
methods.add(mt.trim().toUpperCase());
}
}
}
if (this.uri.startsWith("/")) {
this.uri = this.uri.substring(1);
}
Matcher matcher = URI_REGEX.matcher(this.uri);
StringBuilder regex = new StringBuilder("^");
int start = 0;
while (matcher.find()) {
this.uriRegex = true;
int ms = matcher.start();
regex.append(Pattern.quote(this.uri.substring(start, ms)));
start = matcher.end();
regex.append(matcher.group(1));
}
if (this.uriRegex) {
regex.append("$");
uriPattern = Pattern.compile(regex.toString());
}
}
public boolean isUriRegex() {
return uriRegex;
}
public Set<String> getMethods() {
return methods;
}
public String getUri() {
return uri;
}
public Pattern getUriPattern() {
return uriPattern;
}
public boolean matches(URI reqUri, String method) {
if (getMethods().contains(WILDCARD_TOKEN)
|| getMethods().contains(method)) {
String uri = getUri();
String path;
if (isUriRegex()) {
boolean hasFr = uri.contains("
boolean hasQr = uri.contains("\\?");
if (hasFr && hasQr) {
path = reqUri.getPath() + "#" + reqUri.getFragment() + "?" + reqUri.getQuery();
} else if (!hasFr && !hasQr) {
path = reqUri.getPath();
} else if (hasFr) {
path = reqUri.getPath() + "#" + reqUri.getFragment();
} else {
path = reqUri.getPath() + "?" + reqUri.getQuery();
}
if (getUriPattern().matcher(path).matches()) {
return true;
}
} else {
path = reqUri.getPath();
if (uri.endsWith("**")) {
if (path.startsWith(uri.substring(0, uri.length() - 3))) {
return true;
}
} else if (uri.endsWith("*")) {
int index = uri.length() - 2;
if (path.startsWith(uri.substring(0, index)) && path.indexOf("/", index + 1) == -1) {
return true;
}
} else if (path.equals(uri)) {
return true;
}
}
}
return false;
}
}
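// Illustrative usage sketch (not part of the original source): a rule of the form "<uri>:<methods>"
// is parsed by the single-argument constructor, and "user/*" matches exactly one extra path segment.
// Note that matches() compares against URI.getPath(), so a relative URI (no leading slash) is used here.
class URIMatcherExample {
    public static void main(String[] args) {
        URIMatcher matcher = new URIMatcher("user/*:GET POST");
        System.out.println(matcher.matches(URI.create("user/42"), "GET"));       // true
        System.out.println(matcher.matches(URI.create("user/42/edit"), "GET"));  // false, extra path segment
        System.out.println(matcher.matches(URI.create("user/42"), "DELETE"));    // false, method not listed
    }
}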
|
package be.bow.db.cached;
import be.bow.application.memory.MemoryManager;
import be.bow.cache.Cache;
import be.bow.cache.CacheableData;
import be.bow.cache.CachesManager;
import be.bow.db.DataInterface;
import be.bow.db.LayeredDataInterface;
import be.bow.util.DataLock;
import be.bow.util.KeyValue;
import java.util.Iterator;
import java.util.List;
public class CachedDataInterface<T extends Object> extends LayeredDataInterface<T> implements CacheableData<T> {
private final MemoryManager memoryManager;
private final Cache<T> readCache;
private final Cache<T> writeCache;
private final DataLock writeLock;
public CachedDataInterface(CachesManager cachesManager, MemoryManager memoryManager, DataInterface<T> baseInterface) {
super(baseInterface);
this.memoryManager = memoryManager;
this.readCache = cachesManager.createNewCache(this, false, getName() + "_read");
this.writeCache = cachesManager.createNewCache(this, true, getName() + "_write");
this.writeLock = new DataLock();
}
@Override
public T readInt(long key) {
T value = readCache.get(key);
if (value == null) {
//Never read, read from direct
value = baseInterface.read(key);
readCache.put(key, value);
}
return value;
}
@Override
public boolean mightContain(long key) {
T cachedValue = readCache.get(key);
if (cachedValue != null) {
return true;
} else {
return baseInterface.mightContain(key);
}
}
@Override
public void writeInt(long key, T value) {
memoryManager.waitForSufficientMemory();
writeLock.lockWrite(key);
nonSynchronizedWrite(key, value);
writeLock.unlockWrite(key);
}
private void nonSynchronizedWrite(long key, T value) {
T currentValue = writeCache.get(key);
boolean combine = value != null; //Current action is not a delete
combine = combine && currentValue != null; //Key is already in write cache
if (combine) {
T combinedValue = getCombinator().combine(currentValue, value);
if (combinedValue != null) {
writeCache.put(key, combinedValue);
} else {
writeCache.put(key, null);
}
} else {
writeCache.put(key, value);
}
}
@Override
public void write(Iterator<KeyValue<T>> entries) {
flushWriteCache();
baseInterface.write(entries);
}
@Override
public synchronized void close() {
flush();
baseInterface.close();
}
public void flush() {
flushWriteCache();
baseInterface.flush();
}
private void flushWriteCache() {
writeLock.lockWriteAll();
writeCache.flush();
writeLock.unlockWriteAll();
}
@Override
public void dropAllData() {
writeCache.clear();
readCache.clear();
baseInterface.dropAllData();
}
public long apprSize() {
return writeCache.size() + baseInterface.apprSize();
}
@Override
public void removedValues(Cache cache, List<KeyValue<T>> valuesToRemove) {
if (cache == writeCache) {
int size = valuesToRemove.size();
if (size > 0) {
baseInterface.write(valuesToRemove.iterator());
}
}
}
@Override
public void valuesChanged(long[] keys) {
super.valuesChanged(keys);
for (Long key : keys) {
readCache.remove(key);
}
}
}
|
package be.crydust.tokenreplacer;
import static java.util.stream.Collectors.toList;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author kristof
*/
public class FilesFinder implements Callable<List<Path>> {
private static final Logger LOGGER = LoggerFactory.getLogger(FilesFinder.class);
private final Path path;
private final PathMatcher includesMatcher;
private final PathMatcher excludesMatcher;
private static final PathMatcher ALL_FALSE = it -> false;
/**
* FilesFinder with only one include pattern
*
* @param path
* @param include
* @param excludes
*/
public FilesFinder(Path path, String include, String[] excludes) {
this(path, new String[]{include}, excludes);
}
/**
* FilesFinder with multiple include patterns
*
* @param path
* @param includes
* @param excludes
*/
public FilesFinder(Path path, String[] includes, String[] excludes) {
Objects.requireNonNull(path);
Objects.requireNonNull(includes);
Objects.requireNonNull(excludes);
if (includes.length == 0) {
throw new IllegalArgumentException("includes should not be empty");
}
this.path = path;
this.includesMatcher = FileSystems.getDefault()
.getPathMatcher(patternsToGlob(includes));
if (excludes.length == 0) {
this.excludesMatcher = ALL_FALSE;
} else {
this.excludesMatcher = FileSystems.getDefault()
.getPathMatcher(patternsToGlob(excludes));
}
}
@Override
public List<Path> call() {
try {
return Files.find(path, Integer.MAX_VALUE, (file, attrs) -> {
Path relativePath = path.relativize(file);
return includesMatcher.matches(relativePath)
&& !excludesMatcher.matches(relativePath);
}).collect(toList());
} catch (IOException ex) {
System.err.println(ex.getMessage());
LOGGER.error(null, ex);
}
return Collections.emptyList();
}
private static String escapeGlob(String pattern) {
return pattern.replaceAll("([\\[\\]!{}])", "\\\\$1");
}
private static String patternsToGlob(String[] patterns) {
StringBuilder sb = new StringBuilder();
int partCount = 0;
for (String pattern : patterns) {
Strings.requireNonEmpty(pattern);
if (pattern.startsWith("**/")) {
String extraPattern = pattern.substring(3);
Strings.requireNonEmpty(extraPattern);
partCount++;
sb.append(escapeGlob(extraPattern)).append(',');
}
partCount++;
sb.append(escapeGlob(pattern)).append(',');
}
sb.setLength(sb.length() - 1);
if (partCount > 1) {
sb.insert(0, '{').append('}');
}
sb.insert(0, "glob:");
return sb.toString();
}
}
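// Illustrative usage sketch (not part of the original source): collect every .java file below the
// current directory while skipping anything under a "target" folder. The directory and patterns
// here are only examples; the constructor and call() are the class's own API.
class FilesFinderExample {
    public static void main(String[] args) {
        List<Path> sources = new FilesFinder(
                java.nio.file.Paths.get("."),
                new String[]{"**/*.java"},
                new String[]{"target/**"}).call();
        sources.forEach(System.out::println);
    }
}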
|
package co.trackin.client.api;
import co.trackin.client.ApiException;
import java.util.*;
import co.trackin.client.model.Company;
import co.trackin.client.model.CompanyForm;
import co.trackin.client.model.CompanyFormUpdate;
import co.trackin.client.model.Void;
import java.util.Map;
import java.util.HashMap;
import static java.lang.String.valueOf;
public class CompanyService {
TrackinApi trackinApi;
CompanyService(TrackinApi trackinApi) {
this.trackinApi = trackinApi;
}
public List<Company> getAll(String deliveryAddress, String mode) throws ApiException {
// create path and map variables
String path = "/service/api/json/1.1/companies".replaceAll("\\{format\\}", "json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
if (!"null".equals(valueOf(deliveryAddress)))
queryParams.put("for", valueOf(deliveryAddress));
if (!"null".equals(valueOf(mode)))
queryParams.put("mode", valueOf(mode));
try {
String response = trackinApi.invokeAPI(path, "GET", queryParams, null, headerParams);
if (response != null) {
return (List<Company>) trackinApi.deserialize(response, "array", Company.class);
} else {
return null;
}
} catch (ApiException ex) {
if (ex.getCode() == 404) {
return null;
} else {
throw ex;
}
}
}
public Company create(CompanyForm body) throws ApiException {
// create path and map variables
String path = "/service/api/json/1.1/companies".replaceAll("\\{format\\}", "json");
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
try {
String response = trackinApi.invokeAPI(path, "POST", queryParams, body, headerParams);
if (response != null) {
return (Company) trackinApi.deserialize(response, "", Company.class);
} else {
return null;
}
} catch (ApiException ex) {
if (ex.getCode() == 404) {
return null;
} else {
throw ex;
}
}
}
public Company getOne(Long companyId) throws ApiException {
// create path and map variables
String path = "/service/api/json/1.1/companies/{companyId}".replaceAll("\\{format\\}", "json")
.replaceAll("\\{" + "companyId" + "\\}", trackinApi.escapeString(companyId.toString()));
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
try {
String response = trackinApi.invokeAPI(path, "GET", queryParams, null, headerParams);
if (response != null) {
return (Company) trackinApi.deserialize(response, "", Company.class);
} else {
return null;
}
} catch (ApiException ex) {
if (ex.getCode() == 404) {
return null;
} else {
throw ex;
}
}
}
public Company update(Long companyId, CompanyFormUpdate body) throws ApiException {
// create path and map variables
String path = "/service/api/json/1.1/companies/{companyId}".replaceAll("\\{format\\}", "json")
.replaceAll("\\{" + "companyId" + "\\}", trackinApi.escapeString(companyId.toString()));
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
try {
String response = trackinApi.invokeAPI(path, "PUT", queryParams, body, headerParams);
if (response != null) {
return (Company) trackinApi.deserialize(response, "", Company.class);
} else {
return null;
}
} catch (ApiException ex) {
if (ex.getCode() == 404) {
return null;
} else {
throw ex;
}
}
}
public Void updateCompanyAccounts (Long companyId, Long accountId) throws ApiException {
// create path and map variables
String path = "/service/api/json/1.1/companies/{companyId}/account/{accountId}".replaceAll("\\{format\\}", "json")
.replaceAll("\\{" + "companyId" + "\\}", trackinApi.escapeString(companyId.toString()))
.replaceAll("\\{" + "accountId" + "\\}", trackinApi.escapeString(accountId.toString()));
// query params
Map<String, String> queryParams = new HashMap<String, String>();
Map<String, String> headerParams = new HashMap<String, String>();
try {
String response = trackinApi.invokeAPI(path, "PUT", queryParams, null, headerParams);
if (response != null) {
return (Void) trackinApi.deserialize(response, "", Void.class);
} else {
return null;
}
} catch (ApiException ex) {
if (ex.getCode() == 404) {
return null;
} else {
throw ex;
}
}
}
}
|
package com.adobe.epubcheck.ops;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import com.adobe.epubcheck.api.Report;
import com.adobe.epubcheck.ocf.OCFPackage;
import com.adobe.epubcheck.opf.OPFChecker;
import com.adobe.epubcheck.opf.OPFChecker30;
import com.adobe.epubcheck.opf.XRefChecker;
import com.adobe.epubcheck.util.EPUBVersion;
import com.adobe.epubcheck.util.EpubTypeAttributes;
import com.adobe.epubcheck.util.HandlerUtil;
import com.adobe.epubcheck.util.Messages;
import com.adobe.epubcheck.util.MetaUtils;
import com.adobe.epubcheck.util.PathUtil;
import com.adobe.epubcheck.xml.XMLElement;
import com.adobe.epubcheck.xml.XMLParser;
public class OPSHandler30 extends OPSHandler {
String properties;
HashSet<String> prefixSet;
HashSet<String> propertiesSet;
String mimeType;
boolean video = false;
boolean audio = false;
boolean hasValidFallback = false;
int imbricatedObjects = 0;
int imbricatedCanvases = 0;
public static HashSet<String> linkClassSet;
boolean reportedUnsupportedXMLVersion;
static {
HashSet<String> set = new HashSet<String>();
set.add("vertical");
set.add("horizontal");
set.add("day");
set.add("night");
linkClassSet = set;
}
public OPSHandler30(OCFPackage ocf, String path, String mimeType, String properties,
XRefChecker xrefChecker, XMLParser parser, Report report, EPUBVersion version) {
super(ocf, path, xrefChecker, parser, report, version);
this.mimeType = mimeType;
this.properties = properties;
prefixSet = new HashSet<String>();
propertiesSet = new HashSet<String>();
reportedUnsupportedXMLVersion = false;
}
boolean checkPrefix(String prefix) {
prefix = prefix.trim();
if (!prefixSet.contains(prefix)) {
report.error(path, parser.getLineNumber(),
parser.getColumnNumber(), "Undecleared prefix: " + prefix);
return false;
}
return true;
}
private void checkType(String type) {
if (type == null)
return;
MetaUtils.validateProperties(type, EpubTypeAttributes.EpubTypeSet,
prefixSet, path, parser.getLineNumber(),
parser.getColumnNumber(), report, false);
}
private void checkSSMLPh(String ph) {
//issue 139; enhancement is to add real syntax check for IPA and x-SAMPA
if(ph == null)
return;
if (ph.trim().length() < 1)
report.warning(path, parser.getLineNumber(),
parser.getColumnNumber(), "Empty or whitespace-only value of attribute ssml:ph");
}
@Override
public void characters(char[] chars, int arg1, int arg2) {
super.characters(chars, arg1, arg2);
String str = new String(chars, arg1, arg2);
str = str.trim();
if (!str.equals("")
&& (audio || video || imbricatedObjects > 0 || imbricatedCanvases > 0))
hasValidFallback = true;
}
public void startElement() {
super.startElement();
if (!reportedUnsupportedXMLVersion)
reportedUnsupportedXMLVersion = HandlerUtil.checkXMLVersion(parser);
XMLElement e = parser.getCurrentElement();
String name = e.getName();
if (name.equals("html"))
HandlerUtil.processPrefixes(
e.getAttributeNS("http:
prefixSet, report, path, parser.getLineNumber(),
parser.getColumnNumber());
else if (name.equals("link"))
processLink(e);
else if (name.equals("object"))
processObject(e);
else if (name.equals("math"))
propertiesSet.add("mathml");
else if (!mimeType.equals("image/svg+xml") && name.equals("svg"))
propertiesSet.add("svg");
else if (name.equals("script"))
propertiesSet.add("scripted");
else if (name.equals("switch"))
propertiesSet.add("switch");
else if (name.equals("audio"))
processAudio(e);
else if (name.equals("video"))
processVideo(e);
else if (name.equals("canvas"))
processCanvas(e);
else if (name.equals("img"))
processImg(e);
processSrc(("source".equals(name)) ? e.getParent().getName() : name, e.getAttribute("src"));
checkType(e.getAttributeNS("http:
checkSSMLPh(e.getAttributeNS("http:
}
private void processLink(XMLElement e) {
String classAttribute = e.getAttribute("class");
if (classAttribute == null)
return;
Set<String> values = MetaUtils.validateProperties(classAttribute,
linkClassSet, null, path, parser.getLineNumber(),
parser.getColumnNumber(), report, false);
if (values.size() == 1)
return;
boolean vertical = false, horizontal = false, day = false, night = false;
Iterator<String> it = values.iterator();
while (it.hasNext()) {
String attribute = it.next();
if (attribute.equals("vertical"))
vertical = true;
else if (attribute.equals("horizontal"))
horizontal = true;
else if (attribute.equals("day"))
day = true;
else if (attribute.equals("night"))
night = true;
}
if (vertical && horizontal || day && night)
report.error(path, parser.getLineNumber(),
parser.getColumnNumber(), Messages.CONFLICTING_ATTRIBUTES
+ classAttribute);
}
private void processImg(XMLElement e) {
if ((audio || video || imbricatedObjects > 0 || imbricatedCanvases > 0))
hasValidFallback = true;
}
private void processCanvas(XMLElement e) {
imbricatedCanvases++;
}
private void processAudio(XMLElement e) {
audio = true;
}
private void processVideo(XMLElement e) {
video = true;
String posterSrc = e.getAttribute("poster");
String posterMimeType = null;
if (xrefChecker != null && posterSrc != null)
posterMimeType = xrefChecker.getMimeType(PathUtil
.resolveRelativeReference(path, posterSrc, base));
if (posterMimeType != null
&& !OPFChecker.isBlessedImageType(posterMimeType))
report.error(path, parser.getLineNumber(),
parser.getColumnNumber(),
"Video poster must have core media image type");
if (posterSrc != null) {
hasValidFallback = true;
processSrc(e.getName(), posterSrc);
}
}
private void processSrc(String name, String src) {
if (src != null) {
src = src.trim();
if (src.equals(""))
report.error(path, parser.getLineNumber(),
parser.getColumnNumber(),
"The src attribute must not be empty");
}
if (src == null || xrefChecker == null)
return;
if (src.startsWith("http:
propertiesSet.add("remote-resources");
else
src = PathUtil.resolveRelativeReference(path, src, base);
int refType;
if ("audio".equals(name)) {
refType = XRefChecker.RT_AUDIO;
} else if ("video".equals(name)) {
refType = XRefChecker.RT_VIDEO;
} else {
refType = XRefChecker.RT_GENERIC;
}
xrefChecker.registerReference(path, parser.getLineNumber(),
parser.getColumnNumber(), src, refType);
String srcMimeType = xrefChecker.getMimeType(src);
if (srcMimeType == null)
return;
if (!mimeType.equals("image/svg+xml")
&& srcMimeType.equals("image/svg+xml"))
propertiesSet.add("svg");
if ((audio || video || imbricatedObjects > 0 || imbricatedCanvases > 0)
&& OPFChecker30.isCoreMediaType(srcMimeType)
&& !name.equals("track"))
hasValidFallback = true;
}
private void processObject(XMLElement e) {
imbricatedObjects++;
String type = e.getAttribute("type");
String data = e.getAttribute("data");
if (data != null) {
processSrc(e.getName(), data);
data = PathUtil.resolveRelativeReference(path, data, base);
}
if (type != null && data != null && xrefChecker != null
&& !type.equals(xrefChecker.getMimeType(data)))
report.error(path, parser.getLineNumber(),
parser.getColumnNumber(),
"Object type and the item media-type declared in manifest, do not match");
if (type != null) {
if (!mimeType.equals("image/svg+xml")
&& type.equals("image/svg+xml"))
propertiesSet.add("svg");
if (OPFChecker30.isCoreMediaType(type))
hasValidFallback = true;
}
if (hasValidFallback)
return;
// check bindings
if (xrefChecker != null && type != null
&& xrefChecker.getBindingHandlerSrc(type) != null)
hasValidFallback = true;
}
@Override
public void endElement() {
super.endElement();
XMLElement e = parser.getCurrentElement();
String name = e.getName();
if (openElements == 0 && (name.equals("html") || name.equals("svg"))) {
checkProperties();
} else if (name.equals("object")) {
imbricatedObjects--;
if (imbricatedObjects == 0 && imbricatedCanvases == 0)
checkFallback("Object");
} else if (name.equals("canvas")) {
imbricatedCanvases--;
if (imbricatedObjects == 0 && imbricatedCanvases == 0)
checkFallback("Canvas");
} else if (name.equals("video")) {
if (imbricatedObjects == 0 && imbricatedCanvases == 0)
checkFallback("Video");
video = false;
} else if (name.equals("audio")) {
if (imbricatedObjects == 0 && imbricatedCanvases == 0)
checkFallback("Audio");
audio = false;
}
}
/*
* Checks fallbacks for video, audio and object elements
*/
private void checkFallback(String elementType) {
if (hasValidFallback)
hasValidFallback = false;
else
report.error(path, parser.getLineNumber(),
parser.getColumnNumber(), elementType
+ " element doesn't provide fallback");
}
private void checkProperties() {
if (properties != null && properties.equals("singleFileValidation"))
return;
if (properties != null) {
properties = properties.replaceAll("nav", "");
properties = properties.replaceAll("cover-image", "");
}
Iterator<String> propertyIterator = propertiesSet.iterator();
while (propertyIterator.hasNext()) {
String prop = propertyIterator.next();
if (properties != null && properties.contains(prop))
properties = properties.replaceAll(prop, "");
else
report.error(path, 0, 0,
"This file should declare in opf the property: " + prop);
}
if (properties != null)
properties = properties.trim();
if (properties != null && !properties.equals(""))
report.error(path, 0, 0,
"This file should not declare in opf the properties: "
+ properties);
}
}
|
package com.adyen.model;
import java.util.Objects;
import com.google.gson.annotations.SerializedName;
/**
* TransactionContainer
*/
public class TransactionContainer {
@SerializedName("Transaction")
private Transaction transaction = null;
/**
* transaction
*
* @return transaction
*/
public Transaction getTransaction() {
return transaction;
}
public void setTransaction(Transaction transaction) {
this.transaction = transaction;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TransactionContainer transactionContainer = (TransactionContainer) o;
return Objects.equals(this.transaction, transactionContainer.transaction);
}
@Override
public int hashCode() {
return Objects.hash(transaction);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class TransactionContainer {\n");
sb.append(" transaction: ").append(toIndentedString(transaction)).append("\n");
sb.append("}");
return sb.toString();
}
private String toIndentedString(Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
package com.alibaba.mtc;
import java.util.HashMap;
import java.util.Map;
import java.util.WeakHashMap;
/**
* {@link MtContextThreadLocal} can transmit context from the thread that submits a task to the thread that executes it.
* <p/>
* Note: this class extends {@link java.lang.InheritableThreadLocal},
* so {@link com.alibaba.mtc.MtContextThreadLocal} is first of all a {@link java.lang.InheritableThreadLocal}.
*
* @author ding.lid
* @see MtContextRunnable
* @see MtContextCallable
* @since 0.10.0
*/
public class MtContextThreadLocal<T> extends InheritableThreadLocal<T> {
/**
* Computes the context value for this multi-thread context variable
* as a function of the source thread's value at the time the task
* Object is created. This method is called from {@link com.alibaba.mtc.MtContextRunnable} or
* {@link com.alibaba.mtc.MtContextCallable} when the wrapper is created, before the task is started.
* <p/>
* This method merely returns a reference to the source thread's value, and should be overridden
* if a different behavior is desired.
*
* @since 1.0.0
*/
protected T copyValue(T parentValue) {
return parentValue;
}
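// A minimal sketch (not part of this class; the List/ArrayList types and variable name are
// illustrative): copyValue() can be overridden so the executing task gets its own copy of
// the value instead of a shared reference.
//
//   MtContextThreadLocal<List<String>> tags = new MtContextThreadLocal<List<String>>() {
//       @Override
//       protected List<String> copyValue(List<String> parentValue) {
//           // deep-copy so the executing task cannot mutate the submitter's list
//           return parentValue == null ? null : new ArrayList<String>(parentValue);
//       }
//   };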
/**
* Override this method to have an initial value other than <tt>null</tt>.
*/
@Override
protected T initialValue() {
return super.initialValue();
}
@Override
protected T childValue(T parentValue) {
return super.childValue(parentValue);
}
@Override
public final T get() {
T value = super.get();
if (null != value) {
addMtContextThreadLocal();
}
return value;
}
@Override
public final void set(T value) {
super.set(value);
if (null == value) { // may set null to remove value
removeMtContextThreadLocal();
} else {
addMtContextThreadLocal();
}
}
@Override
public final void remove() {
removeMtContextThreadLocal();
super.remove();
}
T copyMtContextValue() {
return copyValue(get());
}
static ThreadLocal<Map<MtContextThreadLocal<?>, ?>> holder =
new ThreadLocal<Map<MtContextThreadLocal<?>, ?>>() {
@Override
protected Map<MtContextThreadLocal<?>, ?> initialValue() {
return new WeakHashMap<MtContextThreadLocal<?>, Object>();
}
};
void addMtContextThreadLocal() {
if (!holder.get().containsKey(this)) {
holder.get().put(this, null);
}
}
void removeMtContextThreadLocal() {
holder.get().remove(this);
}
static Map<MtContextThreadLocal<?>, Object> copy() {
Map<MtContextThreadLocal<?>, Object> copy = new HashMap<MtContextThreadLocal<?>, Object>();
for (MtContextThreadLocal<?> threadLocal : holder.get().keySet()) {
copy.put(threadLocal, threadLocal.copyMtContextValue());
}
return copy;
}
static Map<MtContextThreadLocal<?>, Object> backupAndSet(Map<MtContextThreadLocal<?>, Object> set) {
// backup MtContext
Map<MtContextThreadLocal<?>, Object> backup = new HashMap<MtContextThreadLocal<?>, Object>();
for (Map.Entry<MtContextThreadLocal<?>, Object> entry : set.entrySet()) {
@SuppressWarnings("unchecked")
MtContextThreadLocal<Object> threadLocal = (MtContextThreadLocal<Object>) entry.getKey();
backup.put(threadLocal, threadLocal.get());
threadLocal.set(entry.getValue());
}
return backup;
}
static void restore(Map<MtContextThreadLocal<?>, Object> backup) {
// restore MtContext
for (Map.Entry<MtContextThreadLocal<?>, Object> entry : backup.entrySet()) {
@SuppressWarnings("unchecked")
MtContextThreadLocal<Object> threadLocal = (MtContextThreadLocal<Object>) entry.getKey();
threadLocal.set(entry.getValue());
}
}
}
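/*
 * A minimal usage sketch of the capture/replay/restore protocol built on copy(),
 * backupAndSet() and restore(). It assumes a same-package wrapper (such as
 * MtContextRunnable) around some Runnable "task"; all names below are illustrative.
 *
 *   // in the submitting thread: capture the current context
 *   final Map<MtContextThreadLocal<?>, Object> captured = MtContextThreadLocal.copy();
 *
 *   Runnable wrapped = new Runnable() {
 *       public void run() {
 *           // in the executing thread: replay the captured context, remembering the old one
 *           Map<MtContextThreadLocal<?>, Object> backup = MtContextThreadLocal.backupAndSet(captured);
 *           try {
 *               task.run();
 *           } finally {
 *               // put the executing thread's own context back
 *               MtContextThreadLocal.restore(backup);
 *           }
 *       }
 *   };
 */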
|
package com.celements.model.util;
import javax.annotation.Nullable;
import javax.validation.constraints.NotNull;
import org.xwiki.component.annotation.ComponentRole;
import org.xwiki.model.EntityType;
import org.xwiki.model.reference.EntityReference;
import org.xwiki.model.reference.WikiReference;
import com.google.common.collect.BiMap;
@ComponentRole
public interface IModelUtils {
@NotNull
public BiMap<Class<? extends EntityReference>, EntityType> getEntityTypeMap();
/**
* @param ref
* @return true if the given reference is absolute, false if it is relative
*/
public boolean isAbsoluteRef(@NotNull EntityReference ref);
/**
* @param ref
* the reference to be cloned
* @return a cloned instance of the reference
*/
@NotNull
public EntityReference cloneRef(@NotNull EntityReference ref);
@NotNull
public <T extends EntityReference> T cloneRef(@NotNull EntityReference ref,
@NotNull Class<T> token);
/**
* resolves the reference class for the given absolute name ({@link WikiReference} may be
* missing).<br>
* <br>
* simple names default to {@link WikiReference}.
*
* @param name
* the string representation
* @return the resolved reference class
*/
@NotNull
public Class<? extends EntityReference> resolveRefClass(@NotNull String name);
@NotNull
public EntityReference resolveRef(@NotNull String name);
@NotNull
public EntityReference resolveRef(@NotNull String name, @Nullable EntityReference baseRef);
@NotNull
public <T extends EntityReference> T resolveRef(@NotNull String name, @NotNull Class<T> token,
@Nullable EntityReference baseRef);
@NotNull
public <T extends EntityReference> T resolveRef(@NotNull String name, @NotNull Class<T> token);
/**
* @param ref
* @return serialised global string representation of the given reference (e.g. "wiki:space.doc")
*/
@NotNull
public String serializeRef(@NotNull EntityReference ref);
/**
* @param ref
* @return serialised local string representation of the given reference (e.g. "space.doc")
*/
@NotNull
public String serializeRefLocal(@NotNull EntityReference ref);
/**
* @param fromRef
* the reference to extract from
* @param token
* reference class to extract
* @return the extracted reference, may be null
*/
@Nullable
public <T extends EntityReference> T extractRef(@Nullable EntityReference fromRef,
@NotNull Class<T> token);
/**
* @param fromRef
* the reference to extract from
* @param defaultRef
* the default reference when unable to extract a reference
* @param token
* reference class to extract
* @return the extracted reference, may NOT be null
*/
@NotNull
public <T extends EntityReference> T extractRef(@Nullable EntityReference fromRef,
@NotNull T defaultRef, @NotNull Class<T> token);
/**
* adjust a reference to another one of higher order, e.g. a docRef to another wikiRef.
*
* @param ref
* to be adjusted
* @param token
* for the reference type
* @param toRef
* it is adjusted to
* @return a new instance of the adjusted reference or ref if toRef was of lower order
*/
@NotNull
public <T extends EntityReference> T adjustRef(@NotNull T ref, @NotNull Class<T> token,
@Nullable EntityReference toRef);
}
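/*
 * A minimal usage sketch, assuming an IModelUtils implementation is injected as a component;
 * the wiki/space/document names below are purely illustrative.
 *
 *   DocumentReference docRef = modelUtils.resolveRef("wiki:Space.Doc", DocumentReference.class);
 *   String global = modelUtils.serializeRef(docRef);      // "wiki:Space.Doc"
 *   String local = modelUtils.serializeRefLocal(docRef);  // "Space.Doc"
 *   WikiReference wikiRef = modelUtils.extractRef(docRef, WikiReference.class);
 *   // move the docRef to another wiki; adjustRef returns a new, adjusted instance
 *   DocumentReference moved = modelUtils.adjustRef(docRef, DocumentReference.class,
 *       new WikiReference("otherwiki"));
 */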
|
package com.docusign.esign.client.auth;
import java.util.List;
import java.util.Map;
import org.apache.oltu.oauth2.client.HttpClient;
import org.apache.oltu.oauth2.client.OAuthClient;
import org.apache.oltu.oauth2.client.request.OAuthClientRequest;
import org.apache.oltu.oauth2.client.response.OAuthJSONAccessTokenResponse;
import org.apache.oltu.oauth2.client.request.OAuthClientRequest.AuthenticationRequestBuilder;
import org.apache.oltu.oauth2.client.request.OAuthClientRequest.TokenRequestBuilder;
import org.apache.oltu.oauth2.common.message.types.GrantType;
import org.apache.oltu.oauth2.common.message.types.ResponseType;
import org.apache.oltu.oauth2.common.token.BasicOAuthToken;
import com.docusign.esign.client.Pair;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientHandlerException;
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaClientCodegen", date = "2017-03-06T16:42:36.211-08:00")
public class OAuth implements Authentication {
static final int MILLIS_PER_SECOND = 1000;
private volatile String accessToken;
private Long expirationTimeMillis;
private OAuthClient oauthClient;
private TokenRequestBuilder tokenRequestBuilder;
private AuthenticationRequestBuilder authenticationRequestBuilder;
private AccessTokenListener accessTokenListener;
public OAuth(Client client, TokenRequestBuilder tokenRequestBuilder, AuthenticationRequestBuilder authenticationRequestBuilder) {
this.oauthClient = new OAuthClient(new OAuthJerseyClient(client));
this.tokenRequestBuilder = tokenRequestBuilder;
this.authenticationRequestBuilder = authenticationRequestBuilder;
}
public OAuth(Client client, OAuthFlow flow, String authorizationUrl, String tokenUrl, String scopes) {
this(client, OAuthClientRequest.tokenLocation(tokenUrl).setScope(scopes), OAuthClientRequest.authorizationLocation(authorizationUrl).setScope(scopes));
switch (flow) {
case accessCode:
tokenRequestBuilder.setGrantType(GrantType.AUTHORIZATION_CODE);
authenticationRequestBuilder.setResponseType(ResponseType.CODE.name().toLowerCase());
break;
case implicit:
tokenRequestBuilder.setGrantType(GrantType.IMPLICIT);
authenticationRequestBuilder.setResponseType(ResponseType.TOKEN.name().toLowerCase());
break;
case password:
tokenRequestBuilder.setGrantType(GrantType.PASSWORD);
break;
case application:
tokenRequestBuilder.setGrantType(GrantType.CLIENT_CREDENTIALS);
break;
default:
break;
}
}
public OAuth(OAuthFlow flow, String authorizationUrl, String tokenUrl, String scopes) {
this(new Client(null, null), flow, authorizationUrl, tokenUrl, scopes);
}
@Override
public void applyToParams(List<Pair> queryParams, Map<String, String> headerParams) {
// If the request already has an authorization header (e.g. Basic auth), do nothing
if (headerParams.containsKey("Authorization")) {
return;
}
// Get a token on first use, or refresh it once the previous one has expired
if (expirationTimeMillis == null || System.currentTimeMillis() >= expirationTimeMillis) {
updateAccessToken();
}
if (accessToken != null) {
headerParams.put("Authorization", "Bearer " + accessToken);
}
}
public synchronized void updateAccessToken() {
OAuthJSONAccessTokenResponse accessTokenResponse;
try {
accessTokenResponse = oauthClient.accessToken(tokenRequestBuilder.buildBodyMessage());
} catch (Exception e) {
throw new ClientHandlerException(e.getMessage(), e);
}
if (accessTokenResponse != null && accessTokenResponse.getAccessToken() != null) {
if (accessTokenResponse.getAccessToken() == null || accessTokenResponse.getExpiresIn() == null) {
throw new ClientHandlerException("Error while requesting an access token: " + accessTokenResponse);
}
setAccessToken(accessTokenResponse.getAccessToken(), accessTokenResponse.getExpiresIn());
if (accessTokenListener != null) {
accessTokenListener.notify((BasicOAuthToken) accessTokenResponse.getOAuthToken());
}
}
}
public synchronized void registerAccessTokenListener(AccessTokenListener accessTokenListener) {
this.accessTokenListener = accessTokenListener;
}
public synchronized String getAccessToken() {
return accessToken;
}
public synchronized void setAccessToken(String accessToken, Long expiresIn) {
this.accessToken = accessToken;
this.expirationTimeMillis = System.currentTimeMillis() + expiresIn * MILLIS_PER_SECOND;
}
public TokenRequestBuilder getTokenRequestBuilder() {
return tokenRequestBuilder;
}
public void setTokenRequestBuilder(TokenRequestBuilder tokenRequestBuilder) {
this.tokenRequestBuilder = tokenRequestBuilder;
}
public AuthenticationRequestBuilder getAuthenticationRequestBuilder() {
return authenticationRequestBuilder;
}
public void setAuthenticationRequestBuilder(AuthenticationRequestBuilder authenticationRequestBuilder) {
this.authenticationRequestBuilder = authenticationRequestBuilder;
}
public OAuthClient getOauthClient() {
return oauthClient;
}
public void setOauthClient(OAuthClient oauthClient) {
this.oauthClient = oauthClient;
}
public void setOauthClient(Client client) {
this.oauthClient = new OAuthClient(new OAuthJerseyClient(client));
}
}
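/*
 * A minimal usage sketch for the resource-owner password flow; the URL, scope and
 * credentials below are placeholders, and queryParams/headerParams are the caller's own
 * collections (this is not DocuSign-specific guidance).
 *
 *   OAuth oauth = new OAuth(OAuthFlow.password, null,
 *       "https://auth.example.com/oauth/token", "signature");
 *   oauth.getTokenRequestBuilder()
 *       .setUsername("user@example.com")
 *       .setPassword("secret");
 *   // applyToParams() fetches a token on first use (or after expiry) and adds the
 *   // "Authorization: Bearer ..." header
 *   oauth.applyToParams(queryParams, headerParams);
 */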
|
package com.facebook.litho;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.graphics.Rect;
import android.support.annotation.AttrRes;
import android.support.annotation.ColorInt;
import android.support.annotation.DimenRes;
import android.support.annotation.Dimension;
import android.support.annotation.DrawableRes;
import android.support.annotation.Px;
import android.support.annotation.StringRes;
import android.support.v4.view.ViewCompat;
import android.text.TextUtils;
import android.util.SparseArray;
import com.facebook.R;
import com.facebook.litho.config.ComponentsConfiguration;
import com.facebook.litho.reference.ColorDrawableReference;
import com.facebook.litho.reference.Reference;
import com.facebook.litho.reference.ResourceDrawableReference;
import com.facebook.infer.annotation.ThreadConfined;
import com.facebook.yoga.YogaAlign;
import com.facebook.yoga.YogaBaselineFunction;
import com.facebook.yoga.YogaFlexDirection;
import com.facebook.yoga.YogaJustify;
import com.facebook.yoga.YogaDirection;
import com.facebook.yoga.YogaPositionType;
import com.facebook.yoga.YogaWrap;
import com.facebook.yoga.YogaEdge;
import com.facebook.yoga.YogaConstants;
import com.facebook.yoga.YogaMeasureFunction;
import com.facebook.yoga.YogaNode;
import com.facebook.yoga.YogaNodeAPI;
import com.facebook.yoga.YogaOverflow;
import com.facebook.yoga.Spacing;
import static android.os.Build.VERSION.SDK_INT;
import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH;
import static android.os.Build.VERSION_CODES.JELLY_BEAN;
import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
import static android.support.annotation.Dimension.DP;
import static com.facebook.litho.ComponentContext.NULL_LAYOUT;
import static com.facebook.yoga.YogaEdge.ALL;
import static com.facebook.yoga.YogaEdge.BOTTOM;
import static com.facebook.yoga.YogaEdge.END;
import static com.facebook.yoga.YogaEdge.HORIZONTAL;
import static com.facebook.yoga.YogaEdge.LEFT;
import static com.facebook.yoga.YogaEdge.RIGHT;
import static com.facebook.yoga.YogaEdge.START;
import static com.facebook.yoga.YogaEdge.TOP;
import static com.facebook.yoga.YogaEdge.VERTICAL;
/**
* Internal class representing both a {@link ComponentLayout} and a
* {@link com.facebook.litho.ComponentLayout.ContainerBuilder}.
*/
@ThreadConfined(ThreadConfined.ANY)
class InternalNode implements ComponentLayout, ComponentLayout.ContainerBuilder {
// Used to check whether or not the framework can use style IDs for
// paddingStart/paddingEnd due to a bug in some Android devices.
private static final boolean SUPPORTS_RTL = (SDK_INT >= JELLY_BEAN_MR1);
// When this flag is set, layoutDirection style was explicitly set on this node.
private static final long PFLAG_LAYOUT_DIRECTION_IS_SET = 1L << 0;
// When this flag is set, alignSelf was explicitly set on this node.
private static final long PFLAG_ALIGN_SELF_IS_SET = 1L << 1;
// When this flag is set, position type was explicitly set on this node.
private static final long PFLAG_POSITION_TYPE_IS_SET = 1L << 2;
// When this flag is set, flex was explicitly set on this node.
private static final long PFLAG_FLEX_IS_SET = 1L << 3;
// When this flag is set, flex grow was explicitly set on this node.
private static final long PFLAG_FLEX_GROW_IS_SET = 1L << 4;
// When this flag is set, flex shrink was explicitly set on this node.
private static final long PFLAG_FLEX_SHRINK_IS_SET = 1L << 5;
// When this flag is set, flex basis was explicitly set on this node.
private static final long PFLAG_FLEX_BASIS_IS_SET = 1L << 6;
// When this flag is set, importantForAccessibility was explicitly set on this node.
private static final long PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET = 1L << 7;
// When this flag is set, duplicateParentState was explicitly set on this node.
private static final long PFLAG_DUPLICATE_PARENT_STATE_IS_SET = 1L << 8;
// When this flag is set, margin was explicitly set on this node.
private static final long PFLAG_MARGIN_IS_SET = 1L << 9;
// When this flag is set, padding was explicitly set on this node.
private static final long PFLAG_PADDING_IS_SET = 1L << 10;
// When this flag is set, position was explicitly set on this node.
private static final long PFLAG_POSITION_IS_SET = 1L << 11;
// When this flag is set, width was explicitly set on this node.
private static final long PFLAG_WIDTH_IS_SET = 1L << 12;
// When this flag is set, minWidth was explicitly set on this node.
private static final long PFLAG_MIN_WIDTH_IS_SET = 1L << 13;
// When this flag is set, maxWidth was explicitly set on this node.
private static final long PFLAG_MAX_WIDTH_IS_SET = 1L << 14;
// When this flag is set, height was explicitly set on this node.
private static final long PFLAG_HEIGHT_IS_SET = 1L << 15;
// When this flag is set, minHeight was explicitly set on this node.
private static final long PFLAG_MIN_HEIGHT_IS_SET = 1L << 16;
// When this flag is set, maxHeight was explicitly set on this node.
private static final long PFLAG_MAX_HEIGHT_IS_SET = 1L << 17;
// When this flag is set, background was explicitly set on this node.
private static final long PFLAG_BACKGROUND_IS_SET = 1L << 18;
// When this flag is set, foreground was explicitly set on this node.
private static final long PFLAG_FOREGROUND_IS_SET = 1L << 19;
// When this flag is set, visibleHandler was explicitly set on this node.
private static final long PFLAG_VISIBLE_HANDLER_IS_SET = 1L << 20;
// When this flag is set, focusedHandler was explicitly set on this node.
private static final long PFLAG_FOCUSED_HANDLER_IS_SET = 1L << 21;
// When this flag is set, fullImpressionHandler was explicitly set on this node.
private static final long PFLAG_FULL_IMPRESSION_HANDLER_IS_SET = 1L << 22;
// When this flag is set, invisibleHandler was explicitly set on this node.
private static final long PFLAG_INVISIBLE_HANDLER_IS_SET = 1L << 23;
// When this flag is set, touch expansion was explicitly set on this node.
private static final long PFLAG_TOUCH_EXPANSION_IS_SET = 1L << 24;
// When this flag is set, border width was explicitly set on this node.
private static final long PFLAG_BORDER_WIDTH_IS_SET = 1L << 25;
// When this flag is set, aspectRatio was explicitly set on this node.
private static final long PFLAG_ASPECT_RATIO_IS_SET = 1L << 26;
// When this flag is set, transitionKey was explicitly set on this node.
private static final long PFLAG_TRANSITION_KEY_IS_SET = 1L << 27;
// When this flag is set, border color was explicitly set on this node.
private static final long PFLAG_BORDER_COLOR_IS_SET = 1L << 28;
private final ResourceResolver mResourceResolver = new ResourceResolver();
YogaNodeAPI mYogaNode;
private ComponentContext mComponentContext;
private Resources mResources;
private Component mComponent;
private int mImportantForAccessibility = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO;
private boolean mDuplicateParentState;
private boolean mIsNestedTreeHolder;
private InternalNode mNestedTree;
private InternalNode mNestedTreeHolder;
private long mPrivateFlags;
private Reference<? extends Drawable> mBackground;
private Reference<? extends Drawable> mForeground;
private int mBorderColor = Color.TRANSPARENT;
private NodeInfo mNodeInfo;
private boolean mForceViewWrapping;
private String mTransitionKey;
private EventHandler mVisibleHandler;
private EventHandler mFocusedHandler;
private EventHandler mFullImpressionHandler;
private EventHandler mInvisibleHandler;
private String mTestKey;
private Spacing mTouchExpansion;
private Spacing mNestedTreePadding;
private Spacing mNestedTreeBorderWidth;
private boolean[] mIsPaddingPercent;
private float mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED;
private float mResolvedTouchExpansionRight = YogaConstants.UNDEFINED;
private float mResolvedX = YogaConstants.UNDEFINED;
private float mResolvedY = YogaConstants.UNDEFINED;
private float mResolvedWidth = YogaConstants.UNDEFINED;
private float mResolvedHeight = YogaConstants.UNDEFINED;
private int mLastWidthSpec = DiffNode.UNSPECIFIED;
private int mLastHeightSpec = DiffNode.UNSPECIFIED;
private float mLastMeasuredWidth = DiffNode.UNSPECIFIED;
private float mLastMeasuredHeight = DiffNode.UNSPECIFIED;
private DiffNode mDiffNode;
private boolean mCachedMeasuresValid;
private TreeProps mPendingTreeProps;
void init(YogaNodeAPI yogaNode, ComponentContext componentContext, Resources resources) {
yogaNode.setData(this);
yogaNode.setOverflow(YogaOverflow.HIDDEN);
yogaNode.setMeasureFunction(null);
// YogaNode is the only version of YogaNodeAPI with this support;
if (yogaNode instanceof YogaNode) {
yogaNode.setBaselineFunction(null);
}
mYogaNode = yogaNode;
mComponentContext = componentContext;
mResources = resources;
mResourceResolver.init(
mComponentContext,
componentContext.getResourceCache());
}
@Px
@Override
public int getX() {
if (YogaConstants.isUndefined(mResolvedX)) {
mResolvedX = mYogaNode.getLayoutX();
}
return (int) mResolvedX;
}
@Px
@Override
public int getY() {
if (YogaConstants.isUndefined(mResolvedY)) {
mResolvedY = mYogaNode.getLayoutY();
}
return (int) mResolvedY;
}
@Px
@Override
public int getWidth() {
if (YogaConstants.isUndefined(mResolvedWidth)) {
mResolvedWidth = mYogaNode.getLayoutWidth();
}
return (int) mResolvedWidth;
}
@Px
@Override
public int getHeight() {
if (YogaConstants.isUndefined(mResolvedHeight)) {
mResolvedHeight = mYogaNode.getLayoutHeight();
}
return (int) mResolvedHeight;
}
@Px
@Override
public int getPaddingLeft() {
return FastMath.round(mYogaNode.getLayoutPadding(LEFT));
}
@Px
@Override
public int getPaddingTop() {
return FastMath.round(mYogaNode.getLayoutPadding(TOP));
}
@Px
@Override
public int getPaddingRight() {
return FastMath.round(mYogaNode.getLayoutPadding(RIGHT));
}
@Px
@Override
public int getPaddingBottom() {
return FastMath.round(mYogaNode.getLayoutPadding(BOTTOM));
}
public Reference<? extends Drawable> getBackground() {
return mBackground;
}
public Reference<? extends Drawable> getForeground() {
return mForeground;
}
public void setCachedMeasuresValid(boolean valid) {
mCachedMeasuresValid = valid;
}
public int getLastWidthSpec() {
return mLastWidthSpec;
}
public void setLastWidthSpec(int widthSpec) {
mLastWidthSpec = widthSpec;
}
public int getLastHeightSpec() {
return mLastHeightSpec;
}
public void setLastHeightSpec(int heightSpec) {
mLastHeightSpec = heightSpec;
}
public boolean hasVisibilityHandlers() {
return mVisibleHandler != null
|| mFocusedHandler != null
|| mFullImpressionHandler != null
|| mInvisibleHandler != null;
}
/**
* The last value the measure function associated with this node's {@link Component} returned
* for the width. This is used together with {@link InternalNode#getLastWidthSpec()}
* to implement measure caching.
*/
float getLastMeasuredWidth() {
return mLastMeasuredWidth;
}
/**
* Sets the last value the measure function associated with this node's {@link Component} returned
* for the width.
*/
void setLastMeasuredWidth(float lastMeasuredWidth) {
mLastMeasuredWidth = lastMeasuredWidth;
}
/**
* The last value the measure function associated with this node's {@link Component} returned
* for the height. This is used together with {@link InternalNode#getLastHeightSpec()}
* to implement measure caching.
*/
float getLastMeasuredHeight() {
return mLastMeasuredHeight;
}
/**
* Sets the last value the measure function associated with this node's {@link Component} returned
* for the height.
*/
void setLastMeasuredHeight(float lastMeasuredHeight) {
mLastMeasuredHeight = lastMeasuredHeight;
}
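// Hypothetical sketch of how a caller could consult the measure cache above (the variables
// and spec comparison shown here are illustrative, not the framework's actual logic):
//
//   if (node.areCachedMeasuresValid()
//       && node.getLastWidthSpec() == widthSpec
//       && node.getLastHeightSpec() == heightSpec) {
//     // reuse the previously measured size instead of invoking the measure function again
//     outputSize.width = (int) node.getLastMeasuredWidth();
//     outputSize.height = (int) node.getLastMeasuredHeight();
//   }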
DiffNode getDiffNode() {
return mDiffNode;
}
boolean areCachedMeasuresValid() {
return mCachedMeasuresValid;
}
void setDiffNode(DiffNode diffNode) {
mDiffNode = diffNode;
}
/**
* Mark this node as a nested tree root holder.
*/
void markIsNestedTreeHolder(TreeProps currentTreeProps) {
mIsNestedTreeHolder = true;
mPendingTreeProps = TreeProps.copy(currentTreeProps);
}
/**
* @return Whether this node is holding a nested tree or not. The decision was made during
* tree creation {@link ComponentLifecycle#createLayout(ComponentContext, Component, boolean)}.
*/
boolean isNestedTreeHolder() {
return mIsNestedTreeHolder;
}
@Override
public YogaDirection getResolvedLayoutDirection() {
return mYogaNode.getLayoutDirection();
}
@Override
public InternalNode layoutDirection(YogaDirection direction) {
mPrivateFlags |= PFLAG_LAYOUT_DIRECTION_IS_SET;
mYogaNode.setDirection(direction);
return this;
}
@Override
public InternalNode flexDirection(YogaFlexDirection direction) {
mYogaNode.setFlexDirection(direction);
return this;
}
@Override
public InternalNode wrap(YogaWrap wrap) {
mYogaNode.setWrap(wrap);
return this;
}
@Override
public InternalNode justifyContent(YogaJustify justifyContent) {
mYogaNode.setJustifyContent(justifyContent);
return this;
}
@Override
public InternalNode alignItems(YogaAlign alignItems) {
mYogaNode.setAlignItems(alignItems);
return this;
}
@Override
public InternalNode alignContent(YogaAlign alignContent) {
mYogaNode.setAlignContent(alignContent);
return this;
}
@Override
public InternalNode alignSelf(YogaAlign alignSelf) {
mPrivateFlags |= PFLAG_ALIGN_SELF_IS_SET;
mYogaNode.setAlignSelf(alignSelf);
return this;
}
@Override
public InternalNode positionType(YogaPositionType positionType) {
mPrivateFlags |= PFLAG_POSITION_TYPE_IS_SET;
mYogaNode.setPositionType(positionType);
return this;
}
@Override
public InternalNode flex(float flex) {
mPrivateFlags |= PFLAG_FLEX_IS_SET;
mYogaNode.setFlex(flex);
return this;
}
@Override
public InternalNode flexGrow(float flexGrow) {
mPrivateFlags |= PFLAG_FLEX_GROW_IS_SET;
mYogaNode.setFlexGrow(flexGrow);
return this;
}
@Override
public InternalNode flexShrink(float flexShrink) {
mPrivateFlags |= PFLAG_FLEX_SHRINK_IS_SET;
mYogaNode.setFlexShrink(flexShrink);
return this;
}
@Override
public InternalNode flexBasisPx(@Px int flexBasis) {
mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET;
mYogaNode.setFlexBasis(flexBasis);
return this;
}
@Override
public InternalNode flexBasisPercent(float percent) {
mPrivateFlags |= PFLAG_FLEX_BASIS_IS_SET;
mYogaNode.setFlexBasisPercent(percent);
return this;
}
@Override
public InternalNode flexBasisAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return flexBasisPx(mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId));
}
@Override
public InternalNode flexBasisAttr(@AttrRes int resId) {
return flexBasisAttr(resId, 0);
}
@Override
public InternalNode flexBasisRes(@DimenRes int resId) {
return flexBasisPx(mResourceResolver.resolveDimenOffsetRes(resId));
}
@Override
public InternalNode flexBasisDip(@Dimension(unit = DP) int flexBasis) {
return flexBasisPx(mResourceResolver.dipsToPixels(flexBasis));
}
@Override
public InternalNode importantForAccessibility(int importantForAccessibility) {
mPrivateFlags |= PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET;
mImportantForAccessibility = importantForAccessibility;
return this;
}
@Override
public InternalNode duplicateParentState(boolean duplicateParentState) {
mPrivateFlags |= PFLAG_DUPLICATE_PARENT_STATE_IS_SET;
mDuplicateParentState = duplicateParentState;
return this;
}
@Override
public InternalNode marginPx(YogaEdge edge, @Px int margin) {
mPrivateFlags |= PFLAG_MARGIN_IS_SET;
mYogaNode.setMargin(edge, margin);
return this;
}
@Override
public InternalNode marginPercent(YogaEdge edge, float percent) {
mPrivateFlags |= PFLAG_MARGIN_IS_SET;
mYogaNode.setMarginPercent(edge, percent);
return this;
}
@Override
public InternalNode marginAuto(YogaEdge edge) {
mPrivateFlags |= PFLAG_MARGIN_IS_SET;
mYogaNode.setMarginAuto(edge);
return this;
}
@Override
public InternalNode marginAttr(
YogaEdge edge,
@AttrRes int resId,
@DimenRes int defaultResId) {
return marginPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId));
}
@Override
public InternalNode marginAttr(
YogaEdge edge,
@AttrRes int resId) {
return marginAttr(edge, resId, 0);
}
@Override
public InternalNode marginRes(YogaEdge edge, @DimenRes int resId) {
return marginPx(edge, mResourceResolver.resolveDimenOffsetRes(resId));
}
@Override
public InternalNode marginDip(YogaEdge edge, @Dimension(unit = DP) int margin) {
return marginPx(edge, mResourceResolver.dipsToPixels(margin));
}
@Override
public InternalNode paddingPx(YogaEdge edge, @Px int padding) {
mPrivateFlags |= PFLAG_PADDING_IS_SET;
if (mIsNestedTreeHolder) {
if (mNestedTreePadding == null) {
mNestedTreePadding = ComponentsPools.acquireSpacing();
}
mNestedTreePadding.set(edge.intValue(), padding);
setIsPaddingPercent(edge, false);
} else {
mYogaNode.setPadding(edge, padding);
}
return this;
}
@Override
public InternalNode paddingPercent(YogaEdge edge, float percent) {
mPrivateFlags |= PFLAG_PADDING_IS_SET;
if (mIsNestedTreeHolder) {
if (mNestedTreePadding == null) {
mNestedTreePadding = ComponentsPools.acquireSpacing();
}
mNestedTreePadding.set(edge.intValue(), percent);
setIsPaddingPercent(edge, true);
} else {
mYogaNode.setPaddingPercent(edge, percent);
}
return this;
}
@Override
public InternalNode paddingAttr(
YogaEdge edge,
@AttrRes int resId,
@DimenRes int defaultResId) {
return paddingPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId));
}
@Override
public InternalNode paddingAttr(
YogaEdge edge,
@AttrRes int resId) {
return paddingAttr(edge, resId, 0);
}
@Override
public InternalNode paddingRes(YogaEdge edge, @DimenRes int resId) {
return paddingPx(edge, mResourceResolver.resolveDimenOffsetRes(resId));
}
@Override
public InternalNode paddingDip(YogaEdge edge, @Dimension(unit = DP) int padding) {
return paddingPx(edge, mResourceResolver.dipsToPixels(padding));
}
@Override
public InternalNode borderWidthPx(YogaEdge edge, @Px int borderWidth) {
mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET;
if (mIsNestedTreeHolder) {
if (mNestedTreeBorderWidth == null) {
mNestedTreeBorderWidth = ComponentsPools.acquireSpacing();
}
mNestedTreeBorderWidth.set(edge.intValue(), borderWidth);
} else {
mYogaNode.setBorder(edge, borderWidth);
}
return this;
}
@Override
public InternalNode borderWidthAttr(
YogaEdge edge,
@AttrRes int resId,
@DimenRes int defaultResId) {
return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId));
}
@Override
public InternalNode borderWidthAttr(
YogaEdge edge,
@AttrRes int resId) {
return borderWidthAttr(edge, resId, 0);
}
@Override
public InternalNode borderWidthRes(YogaEdge edge, @DimenRes int resId) {
return borderWidthPx(edge, mResourceResolver.resolveDimenOffsetRes(resId));
}
@Override
public InternalNode borderWidthDip(
YogaEdge edge,
@Dimension(unit = DP) int borderWidth) {
return borderWidthPx(edge, mResourceResolver.dipsToPixels(borderWidth));
}
@Override
public Builder borderColor(@ColorInt int borderColor) {
mPrivateFlags |= PFLAG_BORDER_COLOR_IS_SET;
mBorderColor = borderColor;
return this;
}
@Override
public InternalNode positionPx(YogaEdge edge, @Px int position) {
mPrivateFlags |= PFLAG_POSITION_IS_SET;
mYogaNode.setPosition(edge, position);
return this;
}
@Override
public InternalNode positionPercent(YogaEdge edge, float percent) {
mPrivateFlags |= PFLAG_POSITION_IS_SET;
mYogaNode.setPositionPercent(edge, percent);
return this;
}
@Override
public InternalNode positionAttr(
YogaEdge edge,
@AttrRes int resId,
@DimenRes int defaultResId) {
return positionPx(edge, mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId));
}
@Override
public InternalNode positionAttr(YogaEdge edge, @AttrRes int resId) {
return positionAttr(edge, resId, 0);
}
@Override
public InternalNode positionRes(YogaEdge edge, @DimenRes int resId) {
return positionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId));
}
@Override
public InternalNode positionDip(
YogaEdge edge,
@Dimension(unit = DP) int position) {
return positionPx(edge, mResourceResolver.dipsToPixels(position));
}
@Override
public InternalNode widthPx(@Px int width) {
mPrivateFlags |= PFLAG_WIDTH_IS_SET;
mYogaNode.setWidth(width);
return this;
}
@Override
public InternalNode widthPercent(float percent) {
mPrivateFlags |= PFLAG_WIDTH_IS_SET;
mYogaNode.setWidthPercent(percent);
return this;
}
@Override
public InternalNode widthRes(@DimenRes int resId) {
return widthPx(mResourceResolver.resolveDimenSizeRes(resId));
}
@Override
public InternalNode widthAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return widthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId));
}
@Override
public InternalNode widthAttr(@AttrRes int resId) {
return widthAttr(resId, 0);
}
@Override
public InternalNode widthDip(@Dimension(unit = DP) int width) {
return widthPx(mResourceResolver.dipsToPixels(width));
}
@Override
public InternalNode minWidthPx(@Px int minWidth) {
mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET;
mYogaNode.setMinWidth(minWidth);
return this;
}
@Override
public InternalNode minWidthPercent(float percent) {
mPrivateFlags |= PFLAG_MIN_WIDTH_IS_SET;
mYogaNode.setMinWidthPercent(percent);
return this;
}
@Override
public InternalNode minWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return minWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId));
}
@Override
public InternalNode minWidthAttr(@AttrRes int resId) {
return minWidthAttr(resId, 0);
}
@Override
public InternalNode minWidthRes(@DimenRes int resId) {
return minWidthPx(mResourceResolver.resolveDimenSizeRes(resId));
}
@Override
public InternalNode minWidthDip(@Dimension(unit = DP) int minWidth) {
return minWidthPx(mResourceResolver.dipsToPixels(minWidth));
}
@Override
public InternalNode maxWidthPx(@Px int maxWidth) {
mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET;
mYogaNode.setMaxWidth(maxWidth);
return this;
}
@Override
public InternalNode maxWidthPercent(float percent) {
mPrivateFlags |= PFLAG_MAX_WIDTH_IS_SET;
mYogaNode.setMaxWidthPercent(percent);
return this;
}
@Override
public InternalNode maxWidthAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return maxWidthPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId));
}
@Override
public InternalNode maxWidthAttr(@AttrRes int resId) {
return maxWidthAttr(resId, 0);
}
@Override
public InternalNode maxWidthRes(@DimenRes int resId) {
return maxWidthPx(mResourceResolver.resolveDimenSizeRes(resId));
}
@Override
public InternalNode maxWidthDip(@Dimension(unit = DP) int maxWidth) {
return maxWidthPx(mResourceResolver.dipsToPixels(maxWidth));
}
@Override
public InternalNode heightPx(@Px int height) {
mPrivateFlags |= PFLAG_HEIGHT_IS_SET;
mYogaNode.setHeight(height);
return this;
}
@Override
public InternalNode heightPercent(float percent) {
mPrivateFlags |= PFLAG_HEIGHT_IS_SET;
mYogaNode.setHeightPercent(percent);
return this;
}
@Override
public InternalNode heightRes(@DimenRes int resId) {
return heightPx(mResourceResolver.resolveDimenSizeRes(resId));
}
@Override
public InternalNode heightAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return heightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId));
}
@Override
public InternalNode heightAttr(@AttrRes int resId) {
return heightAttr(resId, 0);
}
@Override
public InternalNode heightDip(@Dimension(unit = DP) int height) {
return heightPx(mResourceResolver.dipsToPixels(height));
}
@Override
public InternalNode minHeightPx(@Px int minHeight) {
mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET;
mYogaNode.setMinHeight(minHeight);
return this;
}
@Override
public InternalNode minHeightPercent(float percent) {
mPrivateFlags |= PFLAG_MIN_HEIGHT_IS_SET;
mYogaNode.setMinHeightPercent(percent);
return this;
}
@Override
public InternalNode minHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return minHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId));
}
@Override
public InternalNode minHeightAttr(@AttrRes int resId) {
return minHeightAttr(resId, 0);
}
@Override
public InternalNode minHeightRes(@DimenRes int resId) {
return minHeightPx(mResourceResolver.resolveDimenSizeRes(resId));
}
@Override
public InternalNode minHeightDip(@Dimension(unit = DP) int minHeight) {
return minHeightPx(mResourceResolver.dipsToPixels(minHeight));
}
@Override
public InternalNode maxHeightPx(@Px int maxHeight) {
mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET;
mYogaNode.setMaxHeight(maxHeight);
return this;
}
@Override
public InternalNode maxHeightPercent(float percent) {
mPrivateFlags |= PFLAG_MAX_HEIGHT_IS_SET;
mYogaNode.setMaxHeightPercent(percent);
return this;
}
@Override
public InternalNode maxHeightAttr(@AttrRes int resId, @DimenRes int defaultResId) {
return maxHeightPx(mResourceResolver.resolveDimenSizeAttr(resId, defaultResId));
}
@Override
public InternalNode maxHeightAttr(@AttrRes int resId) {
return maxHeightAttr(resId, 0);
}
@Override
public InternalNode maxHeightRes(@DimenRes int resId) {
return maxHeightPx(mResourceResolver.resolveDimenSizeRes(resId));
}
@Override
public InternalNode maxHeightDip(@Dimension(unit = DP) int maxHeight) {
return maxHeightPx(mResourceResolver.dipsToPixels(maxHeight));
}
@Override
public InternalNode aspectRatio(float aspectRatio) {
mPrivateFlags |= PFLAG_ASPECT_RATIO_IS_SET;
if (mYogaNode instanceof YogaNode) {
((YogaNode) mYogaNode).setAspectRatio(aspectRatio);
return this;
} else {
throw new IllegalStateException("Aspect ration requires using YogaNode not YogaNodeDEPRECATED");
}
}
private boolean shouldApplyTouchExpansion() {
return mTouchExpansion != null && mNodeInfo != null && mNodeInfo.hasTouchEventHandlers();
}
boolean hasTouchExpansion() {
return ((mPrivateFlags & PFLAG_TOUCH_EXPANSION_IS_SET) != 0L);
}
Spacing getTouchExpansion() {
return mTouchExpansion;
}
int getTouchExpansionLeft() {
if (!shouldApplyTouchExpansion()) {
return 0;
}
if (YogaConstants.isUndefined(mResolvedTouchExpansionLeft)) {
mResolvedTouchExpansionLeft = resolveHorizontalSpacing(mTouchExpansion, Spacing.LEFT);
}
return FastMath.round(mResolvedTouchExpansionLeft);
}
int getTouchExpansionTop() {
if (!shouldApplyTouchExpansion()) {
return 0;
}
return FastMath.round(mTouchExpansion.get(Spacing.TOP));
}
int getTouchExpansionRight() {
if (!shouldApplyTouchExpansion()) {
return 0;
}
if (YogaConstants.isUndefined(mResolvedTouchExpansionRight)) {
mResolvedTouchExpansionRight = resolveHorizontalSpacing(mTouchExpansion, Spacing.RIGHT);
}
return FastMath.round(mResolvedTouchExpansionRight);
}
int getTouchExpansionBottom() {
if (!shouldApplyTouchExpansion()) {
return 0;
}
return FastMath.round(mTouchExpansion.get(Spacing.BOTTOM));
}
@Override
public InternalNode touchExpansionPx(YogaEdge edge, @Px int touchExpansion) {
if (mTouchExpansion == null) {
mTouchExpansion = ComponentsPools.acquireSpacing();
}
mPrivateFlags |= PFLAG_TOUCH_EXPANSION_IS_SET;
mTouchExpansion.set(edge.intValue(), touchExpansion);
return this;
}
@Override
public InternalNode touchExpansionAttr(
YogaEdge edge,
@AttrRes int resId,
@DimenRes int defaultResId) {
return touchExpansionPx(
edge,
mResourceResolver.resolveDimenOffsetAttr(resId, defaultResId));
}
@Override
public InternalNode touchExpansionAttr(
YogaEdge edge,
@AttrRes int resId) {
return touchExpansionAttr(edge, resId, 0);
}
@Override
public InternalNode touchExpansionRes(YogaEdge edge, @DimenRes int resId) {
return touchExpansionPx(edge, mResourceResolver.resolveDimenOffsetRes(resId));
}
@Override
public InternalNode touchExpansionDip(
YogaEdge edge,
@Dimension(unit = DP) int touchExpansion) {
return touchExpansionPx(edge, mResourceResolver.dipsToPixels(touchExpansion));
}
@Override
public InternalNode child(ComponentLayout child) {
if (child != null && child != NULL_LAYOUT) {
addChildAt((InternalNode) child, mYogaNode.getChildCount());
}
return this;
}
@Override
public InternalNode child(ComponentLayout.Builder child) {
if (child != null && child != NULL_LAYOUT) {
child(child.build());
}
return this;
}
@Override
public InternalNode child(Component<?> child) {
if (child != null) {
child(Layout.create(mComponentContext, child).flexShrink(0));
}
return this;
}
@Override
public InternalNode child(Component.Builder<?> child) {
if (child != null) {
child(child.build());
}
return this;
}
@Override
public InternalNode background(Reference<? extends Drawable> background) {
mPrivateFlags |= PFLAG_BACKGROUND_IS_SET;
mBackground = background;
setPaddingFromDrawableReference(background);
return this;
}
@Override
public InternalNode background(Reference.Builder<? extends Drawable> builder) {
return background(builder.build());
}
@Override
public InternalNode backgroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) {
return backgroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId));
}
@Override
public InternalNode backgroundAttr(@AttrRes int resId) {
return backgroundAttr(resId, 0);
}
@Override
public InternalNode backgroundRes(@DrawableRes int resId) {
if (resId == 0) {
return background((Reference<Drawable>) null);
}
return background(
ResourceDrawableReference.create(mComponentContext)
.resId(resId)
.build());
}
@Override
public InternalNode backgroundColor(@ColorInt int backgroundColor) {
return background(
ColorDrawableReference.create(mComponentContext)
.color(backgroundColor)
.build());
}
@Override
public InternalNode foreground(Reference<? extends Drawable> foreground) {
mPrivateFlags |= PFLAG_FOREGROUND_IS_SET;
mForeground = foreground;
return this;
}
@Override
public InternalNode foreground(Reference.Builder<? extends Drawable> builder) {
return foreground(builder.build());
}
@Override
public InternalNode foregroundAttr(@AttrRes int resId, @DrawableRes int defaultResId) {
return foregroundRes(mResourceResolver.resolveResIdAttr(resId, defaultResId));
}
@Override
public InternalNode foregroundAttr(@AttrRes int resId) {
return foregroundAttr(resId, 0);
}
@Override
public InternalNode foregroundRes(@DrawableRes int resId) {
if (resId == 0) {
return foreground((Reference<Drawable>) null);
}
return foreground(
ResourceDrawableReference.create(mComponentContext)
.resId(resId)
.build());
}
@Override
public InternalNode foregroundColor(@ColorInt int foregroundColor) {
return foreground(
ColorDrawableReference.create(mComponentContext)
.color(foregroundColor)
.build());
}
@Override
public InternalNode wrapInView() {
mForceViewWrapping = true;
return this;
}
boolean isForceViewWrapping() {
return mForceViewWrapping;
}
@Override
public InternalNode clickHandler(EventHandler clickHandler) {
getOrCreateNodeInfo().setClickHandler(clickHandler);
return this;
}
@Override
public InternalNode longClickHandler(EventHandler longClickHandler) {
getOrCreateNodeInfo().setLongClickHandler(longClickHandler);
return this;
}
@Override
public InternalNode touchHandler(EventHandler touchHandler) {
getOrCreateNodeInfo().setTouchHandler(touchHandler);
return this;
}
@Override
public ContainerBuilder focusable(boolean isFocusable) {
getOrCreateNodeInfo().setFocusable(isFocusable);
return this;
}
@Override
public InternalNode visibleHandler(EventHandler visibleHandler) {
mPrivateFlags |= PFLAG_VISIBLE_HANDLER_IS_SET;
mVisibleHandler = visibleHandler;
return this;
}
EventHandler getVisibleHandler() {
return mVisibleHandler;
}
@Override
public InternalNode focusedHandler(EventHandler focusedHandler) {
mPrivateFlags |= PFLAG_FOCUSED_HANDLER_IS_SET;
mFocusedHandler = focusedHandler;
return this;
}
EventHandler getFocusedHandler() {
return mFocusedHandler;
}
@Override
public InternalNode fullImpressionHandler(EventHandler fullImpressionHandler) {
mPrivateFlags |= PFLAG_FULL_IMPRESSION_HANDLER_IS_SET;
mFullImpressionHandler = fullImpressionHandler;
return this;
}
EventHandler getFullImpressionHandler() {
return mFullImpressionHandler;
}
@Override
public InternalNode invisibleHandler(EventHandler invisibleHandler) {
mPrivateFlags |= PFLAG_INVISIBLE_HANDLER_IS_SET;
mInvisibleHandler = invisibleHandler;
return this;
}
EventHandler getInvisibleHandler() {
return mInvisibleHandler;
}
@Override
public InternalNode contentDescription(CharSequence contentDescription) {
getOrCreateNodeInfo().setContentDescription(contentDescription);
return this;
}
@Override
public InternalNode contentDescription(@StringRes int stringId) {
return contentDescription(mResources.getString(stringId));
}
@Override
public InternalNode contentDescription(@StringRes int stringId, Object... formatArgs) {
return contentDescription(mResources.getString(stringId, formatArgs));
}
@Override
public InternalNode viewTag(Object viewTag) {
getOrCreateNodeInfo().setViewTag(viewTag);
return this;
}
@Override
public InternalNode viewTags(SparseArray<Object> viewTags) {
getOrCreateNodeInfo().setViewTags(viewTags);
return this;
}
@Override
public InternalNode testKey(String testKey) {
mTestKey = testKey;
return this;
}
@Override
public InternalNode dispatchPopulateAccessibilityEventHandler(
EventHandler<DispatchPopulateAccessibilityEventEvent>
dispatchPopulateAccessibilityEventHandler) {
getOrCreateNodeInfo().setDispatchPopulateAccessibilityEventHandler(
dispatchPopulateAccessibilityEventHandler);
return this;
}
@Override
public InternalNode onInitializeAccessibilityEventHandler(
EventHandler<OnInitializeAccessibilityEventEvent> onInitializeAccessibilityEventHandler) {
getOrCreateNodeInfo().setOnInitializeAccessibilityEventHandler(
onInitializeAccessibilityEventHandler);
return this;
}
@Override
public InternalNode onInitializeAccessibilityNodeInfoHandler(
EventHandler<OnInitializeAccessibilityNodeInfoEvent>
onInitializeAccessibilityNodeInfoHandler) {
getOrCreateNodeInfo().setOnInitializeAccessibilityNodeInfoHandler(
onInitializeAccessibilityNodeInfoHandler);
return this;
}
@Override
public InternalNode onPopulateAccessibilityEventHandler(
EventHandler<OnPopulateAccessibilityEventEvent> onPopulateAccessibilityEventHandler) {
getOrCreateNodeInfo().setOnPopulateAccessibilityEventHandler(
onPopulateAccessibilityEventHandler);
return this;
}
@Override
public InternalNode onRequestSendAccessibilityEventHandler(
EventHandler<OnRequestSendAccessibilityEventEvent> onRequestSendAccessibilityEventHandler) {
getOrCreateNodeInfo().setOnRequestSendAccessibilityEventHandler(
onRequestSendAccessibilityEventHandler);
return this;
}
@Override
public InternalNode performAccessibilityActionHandler(
EventHandler<PerformAccessibilityActionEvent> performAccessibilityActionHandler) {
getOrCreateNodeInfo().setPerformAccessibilityActionHandler(performAccessibilityActionHandler);
return this;
}
@Override
public InternalNode sendAccessibilityEventHandler(
EventHandler<SendAccessibilityEventEvent> sendAccessibilityEventHandler) {
getOrCreateNodeInfo().setSendAccessibilityEventHandler(sendAccessibilityEventHandler);
return this;
}
@Override
public InternalNode sendAccessibilityEventUncheckedHandler(
EventHandler<SendAccessibilityEventUncheckedEvent> sendAccessibilityEventUncheckedHandler) {
getOrCreateNodeInfo().setSendAccessibilityEventUncheckedHandler(
sendAccessibilityEventUncheckedHandler);
return this;
}
@Override
public ContainerBuilder transitionKey(String key) {
if (SDK_INT >= ICE_CREAM_SANDWICH) {
mPrivateFlags |= PFLAG_TRANSITION_KEY_IS_SET;
mTransitionKey = key;
wrapInView();
}
return this;
}
String getTransitionKey() {
return mTransitionKey;
}
/**
* A unique identifier which may be set for retrieving a component and its bounds when testing.
*/
String getTestKey() {
return mTestKey;
}
void setMeasureFunction(YogaMeasureFunction measureFunction) {
mYogaNode.setMeasureFunction(measureFunction);
}
void setBaselineFunction(YogaBaselineFunction baselineFunction) {
// YogaNode is the only version of YogaNodeAPI with this support;
if (mYogaNode instanceof YogaNode) {
mYogaNode.setBaselineFunction(baselineFunction);
}
}
boolean hasNewLayout() {
return mYogaNode.hasNewLayout();
}
void markLayoutSeen() {
mYogaNode.markLayoutSeen();
}
float getStyleWidth() {
return mYogaNode.getWidth().value;
}
float getMinWidth() {
return mYogaNode.getMinWidth().value;
}
float getMaxWidth() {
return mYogaNode.getMaxWidth().value;
}
float getStyleHeight() {
return mYogaNode.getHeight().value;
}
float getMinHeight() {
return mYogaNode.getMinHeight().value;
}
float getMaxHeight() {
return mYogaNode.getMaxHeight().value;
}
void calculateLayout(float width, float height) {
final ComponentTree tree = mComponentContext == null
? null
: mComponentContext.getComponentTree();
final ComponentsStethoManager stethoManager = tree == null ? null : tree.getStethoManager();
if (stethoManager != null) {
applyOverridesRecursive(stethoManager, this);
}
mYogaNode.calculateLayout(width, height);
}
private static void applyOverridesRecursive(
ComponentsStethoManager stethoManager,
InternalNode node) {
stethoManager.applyOverrides(node);
for (int i = 0, count = node.getChildCount(); i < count; i++) {
applyOverridesRecursive(stethoManager, node.getChildAt(i));
}
if (node.hasNestedTree()) {
applyOverridesRecursive(stethoManager, node.getNestedTree());
}
}
void calculateLayout() {
calculateLayout(YogaConstants.UNDEFINED, YogaConstants.UNDEFINED);
}
int getChildCount() {
return mYogaNode.getChildCount();
}
com.facebook.yoga.YogaDirection getStyleDirection() {
return mYogaNode.getStyleDirection();
}
InternalNode getChildAt(int index) {
if (mYogaNode.getChildAt(index) == null) {
return null;
}
return (InternalNode) mYogaNode.getChildAt(index).getData();
}
int getChildIndex(InternalNode child) {
for (int i = 0, count = mYogaNode.getChildCount(); i < count; i++) {
if (mYogaNode.getChildAt(i) == child.mYogaNode) {
return i;
}
}
return -1;
}
InternalNode getParent() {
if (mYogaNode == null || mYogaNode.getParent() == null) {
return null;
}
return (InternalNode) mYogaNode.getParent().getData();
}
void addChildAt(InternalNode child, int index) {
mYogaNode.addChildAt(child.mYogaNode, index);
}
InternalNode removeChildAt(int index) {
return (InternalNode) mYogaNode.removeChildAt(index).getData();
}
@Override
public ComponentLayout build() {
return this;
}
private float resolveHorizontalSpacing(Spacing spacing, int index) {
final boolean isRtl =
(mYogaNode.getLayoutDirection() == YogaDirection.RTL);
final int resolvedIndex;
switch (index) {
case Spacing.LEFT:
resolvedIndex = (isRtl ? Spacing.END : Spacing.START);
break;
case Spacing.RIGHT:
resolvedIndex = (isRtl ? Spacing.START : Spacing.END);
break;
default:
throw new IllegalArgumentException("Not an horizontal padding index: " + index);
}
float result = spacing.getRaw(resolvedIndex);
if (YogaConstants.isUndefined(result)) {
result = spacing.get(index);
}
return result;
}
ComponentContext getContext() {
return mComponentContext;
}
Component getComponent() {
return mComponent;
}
int getBorderColor() {
return mBorderColor;
}
boolean shouldDrawBorders() {
return mBorderColor != Color.TRANSPARENT
&& (mYogaNode.getLayoutBorder(LEFT) != 0
|| mYogaNode.getLayoutBorder(TOP) != 0
|| mYogaNode.getLayoutBorder(RIGHT) != 0
|| mYogaNode.getLayoutBorder(BOTTOM) != 0);
}
void setComponent(Component component) {
mComponent = component;
}
boolean hasNestedTree() {
return mNestedTree != null;
}
@Nullable InternalNode getNestedTree() {
return mNestedTree;
}
InternalNode getNestedTreeHolder() {
return mNestedTreeHolder;
}
/**
* Set the nested tree before measuring it in order to transfer over important information
* such as layout direction needed during measurement.
*/
void setNestedTree(InternalNode nestedTree) {
nestedTree.mNestedTreeHolder = this;
mNestedTree = nestedTree;
}
NodeInfo getNodeInfo() {
return mNodeInfo;
}
void copyInto(InternalNode node) {
if (mNodeInfo != null) {
if (node.mNodeInfo == null) {
node.mNodeInfo = mNodeInfo.acquireRef();
} else {
node.mNodeInfo.updateWith(mNodeInfo);
}
}
if ((node.mPrivateFlags & PFLAG_LAYOUT_DIRECTION_IS_SET) == 0L
|| node.getResolvedLayoutDirection() == YogaDirection.INHERIT) {
node.layoutDirection(getResolvedLayoutDirection());
}
if ((node.mPrivateFlags & PFLAG_IMPORTANT_FOR_ACCESSIBILITY_IS_SET) == 0L
|| node.mImportantForAccessibility == ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO) {
node.mImportantForAccessibility = mImportantForAccessibility;
}
if ((mPrivateFlags & PFLAG_DUPLICATE_PARENT_STATE_IS_SET) != 0L) {
node.mDuplicateParentState = mDuplicateParentState;
}
if ((mPrivateFlags & PFLAG_BACKGROUND_IS_SET) != 0L) {
node.mBackground = mBackground;
}
if ((mPrivateFlags & PFLAG_FOREGROUND_IS_SET) != 0L) {
node.mForeground = mForeground;
}
if (mForceViewWrapping) {
node.mForceViewWrapping = true;
}
if ((mPrivateFlags & PFLAG_VISIBLE_HANDLER_IS_SET) != 0L) {
node.mVisibleHandler = mVisibleHandler;
}
if ((mPrivateFlags & PFLAG_FOCUSED_HANDLER_IS_SET) != 0L) {
node.mFocusedHandler = mFocusedHandler;
}
if ((mPrivateFlags & PFLAG_FULL_IMPRESSION_HANDLER_IS_SET) != 0L) {
node.mFullImpressionHandler = mFullImpressionHandler;
}
if ((mPrivateFlags & PFLAG_INVISIBLE_HANDLER_IS_SET) != 0L) {
node.mInvisibleHandler = mInvisibleHandler;
}
if (mTestKey != null) {
node.mTestKey = mTestKey;
}
if ((mPrivateFlags & PFLAG_PADDING_IS_SET) != 0L) {
if (mNestedTreePadding == null) {
throw new IllegalStateException("copyInto() must be used when resolving a nestedTree. " +
"If padding was set on the holder node, we must have a mNestedTreePadding instance");
}
final YogaNodeAPI yogaNode = node.mYogaNode;
node.mPrivateFlags |= PFLAG_PADDING_IS_SET;
if (isPaddingPercent(LEFT)) {
yogaNode.setPaddingPercent(LEFT, mNestedTreePadding.getRaw(Spacing.LEFT));
} else {
yogaNode.setPadding(LEFT, mNestedTreePadding.getRaw(Spacing.LEFT));
}
if (isPaddingPercent(TOP)) {
yogaNode.setPaddingPercent(TOP, mNestedTreePadding.getRaw(Spacing.TOP));
} else {
yogaNode.setPadding(TOP, mNestedTreePadding.getRaw(Spacing.TOP));
}
if (isPaddingPercent(RIGHT)) {
yogaNode.setPaddingPercent(RIGHT, mNestedTreePadding.getRaw(Spacing.RIGHT));
} else {
yogaNode.setPadding(RIGHT, mNestedTreePadding.getRaw(Spacing.RIGHT));
}
if (isPaddingPercent(BOTTOM)) {
yogaNode.setPaddingPercent(BOTTOM, mNestedTreePadding.getRaw(Spacing.BOTTOM));
} else {
yogaNode.setPadding(BOTTOM, mNestedTreePadding.getRaw(Spacing.BOTTOM));
}
if (isPaddingPercent(VERTICAL)) {
yogaNode.setPaddingPercent(VERTICAL, mNestedTreePadding.getRaw(Spacing.VERTICAL));
} else {
yogaNode.setPadding(VERTICAL, mNestedTreePadding.getRaw(Spacing.VERTICAL));
}
if (isPaddingPercent(HORIZONTAL)) {
yogaNode.setPaddingPercent(HORIZONTAL, mNestedTreePadding.getRaw(Spacing.HORIZONTAL));
} else {
yogaNode.setPadding(HORIZONTAL, mNestedTreePadding.getRaw(Spacing.HORIZONTAL));
}
if (isPaddingPercent(START)) {
yogaNode.setPaddingPercent(START, mNestedTreePadding.getRaw(Spacing.START));
} else {
yogaNode.setPadding(START, mNestedTreePadding.getRaw(Spacing.START));
}
if (isPaddingPercent(END)) {
yogaNode.setPaddingPercent(END, mNestedTreePadding.getRaw(Spacing.END));
} else {
yogaNode.setPadding(END, mNestedTreePadding.getRaw(Spacing.END));
}
if (isPaddingPercent(ALL)) {
yogaNode.setPaddingPercent(ALL, mNestedTreePadding.getRaw(Spacing.ALL));
} else {
yogaNode.setPadding(ALL, mNestedTreePadding.getRaw(Spacing.ALL));
}
}
if ((mPrivateFlags & PFLAG_BORDER_WIDTH_IS_SET) != 0L) {
if (mNestedTreeBorderWidth == null) {
throw new IllegalStateException("copyInto() must be used when resolving a nestedTree. " +
"If border width was set on the holder node, we must have a mNestedTreeBorderWidth " +
"instance");
}
final YogaNodeAPI yogaNode = node.mYogaNode;
node.mPrivateFlags |= PFLAG_BORDER_WIDTH_IS_SET;
yogaNode.setBorder(LEFT, mNestedTreeBorderWidth.getRaw(Spacing.LEFT));
yogaNode.setBorder(TOP, mNestedTreeBorderWidth.getRaw(Spacing.TOP));
yogaNode.setBorder(RIGHT, mNestedTreeBorderWidth.getRaw(Spacing.RIGHT));
yogaNode.setBorder(BOTTOM, mNestedTreeBorderWidth.getRaw(Spacing.BOTTOM));
yogaNode.setBorder(VERTICAL, mNestedTreeBorderWidth.getRaw(Spacing.VERTICAL));
yogaNode.setBorder(HORIZONTAL, mNestedTreeBorderWidth.getRaw(Spacing.HORIZONTAL));
yogaNode.setBorder(START, mNestedTreeBorderWidth.getRaw(Spacing.START));
yogaNode.setBorder(END, mNestedTreeBorderWidth.getRaw(Spacing.END));
yogaNode.setBorder(ALL, mNestedTreeBorderWidth.getRaw(Spacing.ALL));
}
if ((mPrivateFlags & PFLAG_TRANSITION_KEY_IS_SET) != 0L) {
node.mTransitionKey = mTransitionKey;
}
if ((mPrivateFlags & PFLAG_BORDER_COLOR_IS_SET) != 0L) {
node.mBorderColor = mBorderColor;
}
}
void setStyleWidthFromSpec(int widthSpec) {
switch (SizeSpec.getMode(widthSpec)) {
case SizeSpec.UNSPECIFIED:
mYogaNode.setWidth(YogaConstants.UNDEFINED);
break;
case SizeSpec.AT_MOST:
mYogaNode.setMaxWidth(SizeSpec.getSize(widthSpec));
break;
case SizeSpec.EXACTLY:
mYogaNode.setWidth(SizeSpec.getSize(widthSpec));
break;
}
}
void setStyleHeightFromSpec(int heightSpec) {
switch (SizeSpec.getMode(heightSpec)) {
case SizeSpec.UNSPECIFIED:
mYogaNode.setHeight(YogaConstants.UNDEFINED);
break;
case SizeSpec.AT_MOST:
mYogaNode.setMaxHeight(SizeSpec.getSize(heightSpec));
break;
case SizeSpec.EXACTLY:
mYogaNode.setHeight(SizeSpec.getSize(heightSpec));
break;
}
}
int getImportantForAccessibility() {
return mImportantForAccessibility;
}
boolean isDuplicateParentStateEnabled() {
return mDuplicateParentState;
}
void applyAttributes(TypedArray a) {
for (int i = 0, size = a.getIndexCount(); i < size; i++) {
final int attr = a.getIndex(i);
if (attr == R.styleable.ComponentLayout_android_layout_width) {
int width = a.getLayoutDimension(attr, -1);
// We don't support WRAP_CONTENT or MATCH_PARENT so no-op for them
if (width >= 0) {
widthPx(width);
}
} else if (attr == R.styleable.ComponentLayout_android_layout_height) {
int height = a.getLayoutDimension(attr, -1);
// We don't support WRAP_CONTENT or MATCH_PARENT so no-op for them
if (height >= 0) {
heightPx(height);
}
} else if (attr == R.styleable.ComponentLayout_android_paddingLeft) {
paddingPx(LEFT, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_paddingTop) {
paddingPx(TOP, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_paddingRight) {
paddingPx(RIGHT, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_paddingBottom) {
paddingPx(BOTTOM, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_paddingStart && SUPPORTS_RTL) {
paddingPx(START, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_paddingEnd && SUPPORTS_RTL) {
paddingPx(END, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_padding) {
paddingPx(ALL, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_marginLeft) {
marginPx(LEFT, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_marginTop) {
marginPx(TOP, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_marginRight) {
marginPx(RIGHT, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_marginBottom) {
marginPx(BOTTOM, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_marginStart && SUPPORTS_RTL) {
marginPx(START, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_marginEnd && SUPPORTS_RTL) {
marginPx(END, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_layout_margin) {
marginPx(ALL, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_importantForAccessibility &&
SDK_INT >= JELLY_BEAN) {
importantForAccessibility(a.getInt(attr, 0));
} else if (attr == R.styleable.ComponentLayout_android_duplicateParentState) {
duplicateParentState(a.getBoolean(attr, false));
} else if (attr == R.styleable.ComponentLayout_android_background) {
if (TypedArrayUtils.isColorAttribute(a, R.styleable.ComponentLayout_android_background)) {
backgroundColor(a.getColor(attr, 0));
} else {
backgroundRes(a.getResourceId(attr, -1));
}
} else if (attr == R.styleable.ComponentLayout_android_foreground) {
if (TypedArrayUtils.isColorAttribute(a, R.styleable.ComponentLayout_android_foreground)) {
foregroundColor(a.getColor(attr, 0));
} else {
foregroundRes(a.getResourceId(attr, -1));
}
} else if (attr == R.styleable.ComponentLayout_android_contentDescription) {
contentDescription(a.getString(attr));
} else if (attr == R.styleable.ComponentLayout_flex_direction) {
flexDirection(YogaFlexDirection.fromInt(a.getInteger(attr, 0)));
} else if (attr == R.styleable.ComponentLayout_flex_wrap) {
wrap(YogaWrap.fromInt(a.getInteger(attr, 0)));
} else if (attr == R.styleable.ComponentLayout_flex_justifyContent) {
justifyContent(YogaJustify.fromInt(a.getInteger(attr, 0)));
} else if (attr == R.styleable.ComponentLayout_flex_alignItems) {
alignItems(YogaAlign.fromInt(a.getInteger(attr, 0)));
} else if (attr == R.styleable.ComponentLayout_flex_alignSelf) {
alignSelf(YogaAlign.fromInt(a.getInteger(attr, 0)));
} else if (attr == R.styleable.ComponentLayout_flex_positionType) {
positionType(YogaPositionType.fromInt(a.getInteger(attr, 0)));
} else if (attr == R.styleable.ComponentLayout_flex) {
final float flex = a.getFloat(attr, -1);
if (flex >= 0f) {
flex(flex);
}
} else if (attr == R.styleable.ComponentLayout_flex_left) {
positionPx(LEFT, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_flex_top) {
positionPx(TOP, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_flex_right) {
positionPx(RIGHT, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_flex_bottom) {
positionPx(BOTTOM, a.getDimensionPixelOffset(attr, 0));
} else if (attr == R.styleable.ComponentLayout_flex_layoutDirection) {
final int layoutDirection = a.getInteger(attr, -1);
layoutDirection(YogaDirection.fromInt(layoutDirection));
}
}
}
/**
* Reset all attributes to default values. Intended to facilitate recycling.
*/
void release() {
if (mYogaNode.getParent() != null || mYogaNode.getChildCount() > 0) {
throw new IllegalStateException("You should not free an attached InternalNode");
}
ComponentsPools.release(mYogaNode);
mYogaNode = null;
mResourceResolver.internalRelease();
mResolvedTouchExpansionLeft = YogaConstants.UNDEFINED;
mResolvedTouchExpansionRight = YogaConstants.UNDEFINED;
mResolvedX = YogaConstants.UNDEFINED;
mResolvedY = YogaConstants.UNDEFINED;
mResolvedWidth = YogaConstants.UNDEFINED;
mResolvedHeight = YogaConstants.UNDEFINED;
mComponentContext = null;
mResources = null;
mComponent = null;
mNestedTree = null;
mNestedTreeHolder = null;
if (mNodeInfo != null) {
mNodeInfo.release();
mNodeInfo = null;
}
mImportantForAccessibility = ViewCompat.IMPORTANT_FOR_ACCESSIBILITY_AUTO;
mDuplicateParentState = false;
mBackground = null;
mForeground = null;
mForceViewWrapping = false;
mVisibleHandler = null;
mFocusedHandler = null;
mFullImpressionHandler = null;
mInvisibleHandler = null;
mPrivateFlags = 0L;
mTransitionKey = null;
mBorderColor = Color.TRANSPARENT;
mIsPaddingPercent = null;
if (mTouchExpansion != null) {
ComponentsPools.release(mTouchExpansion);
mTouchExpansion = null;
}
if (mNestedTreePadding != null) {
ComponentsPools.release(mNestedTreePadding);
mNestedTreePadding = null;
}
if (mNestedTreeBorderWidth != null) {
ComponentsPools.release(mNestedTreeBorderWidth);
mNestedTreeBorderWidth = null;
}
mLastWidthSpec = DiffNode.UNSPECIFIED;
mLastHeightSpec = DiffNode.UNSPECIFIED;
mLastMeasuredHeight = DiffNode.UNSPECIFIED;
mLastMeasuredWidth = DiffNode.UNSPECIFIED;
mDiffNode = null;
mCachedMeasuresValid = false;
mIsNestedTreeHolder = false;
mTestKey = null;
if (mPendingTreeProps != null) {
mPendingTreeProps.reset();
ComponentsPools.release(mPendingTreeProps);
mPendingTreeProps = null;
}
}
private NodeInfo getOrCreateNodeInfo() {
if (mNodeInfo == null) {
mNodeInfo = NodeInfo.acquire();
}
return mNodeInfo;
}
/**
* Check that the root of the nested tree we are going to use has a layout direction that is
* valid with respect to its main tree holder node.
*/
static boolean hasValidLayoutDirectionInNestedTree(
InternalNode nestedTreeHolder,
InternalNode nestedTree) {
final boolean nestedTreeHasExplicitDirection =
((nestedTree.mPrivateFlags & PFLAG_LAYOUT_DIRECTION_IS_SET) != 0L);
final boolean hasSameLayoutDirection =
(nestedTree.getResolvedLayoutDirection() == nestedTreeHolder.getResolvedLayoutDirection());
return nestedTreeHasExplicitDirection || hasSameLayoutDirection;
}
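// Illustrative sketch (not part of the original source): a caller resolving a nested tree
// would presumably guard the reuse of a cached tree with this check, e.g.
//   if (!hasValidLayoutDirectionInNestedTree(holderNode, resolvedNestedTree)) {
//     // re-resolve the nested tree so it picks up the holder's layout direction
//   }
// holderNode and resolvedNestedTree are hypothetical local variables.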
/**
* Adds an item to a possibly null list, deferring the list allocation as long as possible.
*/
private static <A> List<A> addOrCreateList(@Nullable List<A> list, A item) {
if (list == null) {
list = new LinkedList<>();
}
list.add(item);
return list;
}
private void setIsPaddingPercent(YogaEdge edge, boolean isPaddingPercent) {
if (mIsPaddingPercent == null && isPaddingPercent) {
mIsPaddingPercent = new boolean[Spacing.ALL + 1];
}
if (mIsPaddingPercent != null) {
mIsPaddingPercent[edge.intValue()] = isPaddingPercent;
}
}
private boolean isPaddingPercent(YogaEdge edge) {
return (mIsPaddingPercent == null) ? false : mIsPaddingPercent[edge.intValue()];
}
/**
* Crash if the given node has context specific style set.
*/
static void assertContextSpecificStyleNotSet(InternalNode node) {
List<CharSequence> errorTypes = null;
if ((node.mPrivateFlags & PFLAG_ALIGN_SELF_IS_SET) != 0L) {
errorTypes = addOrCreateList(errorTypes, "alignSelf");
}
if ((node.mPrivateFlags & PFLAG_POSITION_TYPE_IS_SET) != 0L) {
errorTypes = addOrCreateList(errorTypes, "positionType");
}
if ((node.mPrivateFlags & PFLAG_FLEX_IS_SET) != 0L) {
errorTypes = addOrCreateList(errorTypes, "flex");
}
if ((node.mPrivateFlags & PFLAG_FLEX_GROW_IS_SET) != 0L) {
errorTypes = addOrCreateList(errorTypes, "flexGrow");
}
if ((node.mPrivateFlags & PFLAG_FLEX_SHRINK_IS_SET) != 0L) {
errorTypes = addOrCreateList(errorTypes, "flexShrink");
}
if ((node.mPrivateFlags & PFLAG_MARGIN_IS_SET) != 0L) {
|
package com.gaocy.sample.spider;
import com.alibaba.fastjson.JSON;
import com.gaocy.sample.util.CityUtil;
import com.gaocy.sample.util.ConfUtil;
import com.gaocy.sample.vo.CarVo;
import org.apache.commons.lang3.StringUtils;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class SpiderRunner<T extends List<CarVo>> implements Callable<T> {
public static final ExecutorService es = Executors.newFixedThreadPool(ConfUtil.getString("init.src.list").split(",").length + 1);
private static DateFormat dfDate = new SimpleDateFormat("yyyyMMdd");
private static DateFormat dfTime = new SimpleDateFormat("HHmmss");
private Spider spider;
public SpiderRunner(Spider spider) {
this.spider = spider;
}
public void submit() {
es.submit(this);
}
@Override
public T call() throws Exception {
String spiderName = spider.getClass().getSimpleName().toLowerCase().replaceAll("spider", "");
String[] cityNameArr = spider.getCityNameArr();
if (null == cityNameArr || cityNameArr.length < 1 || StringUtils.isBlank(cityNameArr[0])) {
Set<String> citySet = CityUtil.getAllCityNameBySpider(SpiderEnum.valueOf(spiderName));
if (null != citySet) {
cityNameArr = citySet.toArray(new String[] {});
}
}
T infoAllList = (T) new ArrayList<CarVo>();
for (String cityName : cityNameArr) {
long startTime = System.currentTimeMillis();
List<CarVo> infoList = spider.listByCityName(cityName);
long endTime = System.currentTimeMillis();
String timeStr = dfDate.format(new Date(startTime)) + " " + dfTime.format(new Date(startTime)) + "_" + dfTime.format(new Date(endTime));
SpiderBase.logToFile("elapse_" + dfDate.format(new Date()), "[" + timeStr + "] [" + spiderName + "] [" + cityName + "] get " + infoList.size() + " cars, elapse: " + ((endTime - startTime) / 1000) + "s");
infoAllList.addAll(infoList);
}
callback(infoAllList);
return infoAllList;
}
public void callback(List<CarVo> list) {
for (CarVo vo : list) {
// SpiderBase.logToFile(dfDate.format(new Date()) + "/" + vo.getSrc().name() + ".txt", JSON.toJSONString(vo));
}
}
}
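// Illustrative usage sketch (assumption, not part of the original source): a concrete Spider
// implementation would be wrapped and submitted to the shared executor, e.g.
//   Spider spider = ...; // some SpiderEnum-backed implementation (hypothetical)
//   new SpiderRunner<List<CarVo>>(spider).submit();
// call() then walks the spider's city list, logs the per-city elapsed time to the daily
// "elapse_" file and returns the combined List<CarVo>.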
|
package com.google.sps.servlets;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.util.Map;
import java.util.HashMap;
import java.lang.reflect.Type;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.google.sps.data.UrlRequest;
/** Servlet that gets civic information. */
@WebServlet("/civic")
public class CivicServlet extends HttpServlet {
private static final String API_KEY = "API_KEY"; // Insert the API_KEY here for testing.
private static final String API_PATH = "https:
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
String latitude = request.getParameter("lat");
String longitude = request.getParameter("lng");
Map<String, String> locationQueryParams = new HashMap<>();
locationQueryParams.put("lat", latitude);
locationQueryParams.put("lng", longitude);
String locationUrl = String.format("%s://%s:%s/location",
request.getScheme(),
request.getServerName(),
request.getServerPort());
String address = "";
try {
String locationJsonResult = UrlRequest.urlQuery(locationUrl, locationQueryParams);
Gson gson = new Gson();
Type mapType = new TypeToken<Map<String, String>>() {}.getType();
Map<String, String> locationMap = gson.fromJson(locationJsonResult, mapType);
address = String.format("%s %s, %s, %s %s",
locationMap.get("Street Number"),
locationMap.get("Street Name"),
locationMap.get("City"),
locationMap.get("State"),
locationMap.get("Zip Code"));
} catch (FileNotFoundException fnfe) {
// Location URL doesn't work on devserver, so instead use hardcoded string.
fnfe.printStackTrace();
address = "1 LMU Dr, Los Angeles, California";
} finally {
Map<String, String> apiQueryParams = new HashMap<>();
apiQueryParams.put("key", API_KEY);
apiQueryParams.put("address", address);
String jsonResult = UrlRequest.urlQuery(API_PATH, apiQueryParams);
response.setContentType("application/json;");
response.getWriter().println(jsonResult);
}
}
}
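// Illustrative request sketch (assumption, not part of the original source): the servlet expects
// latitude/longitude query parameters and chains two lookups, e.g.
//   GET /civic?lat=33.9696&lng=-118.4158   (made-up example coordinates)
// It first resolves a street address via the local /location servlet, then forwards that address
// plus the configured API key to the civic-information API and writes its JSON response through.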
|
package com.google.sps.servlets;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.cloud.tasks.v2.AppEngineHttpRequest;
import com.google.cloud.tasks.v2.CloudTasksClient;
import com.google.cloud.tasks.v2.HttpMethod;
import com.google.cloud.tasks.v2.QueueName;
import com.google.cloud.tasks.v2.Task;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.nio.charset.Charset;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/queueEnvPull")
public class QueueEnvPull extends HttpServlet {
@VisibleForTesting
protected CloudTasksClient getClient() throws IOException {
return CloudTasksClient.create();
}
@VisibleForTesting
protected String getProjectID() {
return System.getenv("GOOGLE_CLOUD_PROJECT");
}
@VisibleForTesting
protected String getQueueName() {
return System.getenv("EXECUTION_QUEUE_ID");
}
@VisibleForTesting
protected String getLocation() {
return System.getenv("LOCATION_ID");
}
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
String classID = req.getParameter("classID");
String image = req.getParameter("image");
String tag = req.getParameter("tag");
DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
Entity e = new Entity("Environment");
e.setProperty("status", "pulling");
e.setProperty("class", KeyFactory.stringToKey(classID));
datastore.put(e);
String envID = KeyFactory.keyToString(e.getKey());
try (CloudTasksClient client = getClient()) {
String queuePath = QueueName.of(getProjectID(), getLocation(), getQueueName()).toString();
Task.Builder taskBuilder =
Task.newBuilder()
.setAppEngineHttpRequest(
AppEngineHttpRequest.newBuilder()
.setBody(
ByteString.copyFrom(
String.join(",", envID, classID, image, tag),
Charset.defaultCharset()))
.setRelativeUri("/tasks/pullEnv")
.setHttpMethod(HttpMethod.POST)
.build());
client.createTask(queuePath, taskBuilder.build());
resp.getWriter().print(envID);
}
}
}
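// Illustrative sketch (assumption, not part of the original source): the task body enqueued above
// is the comma-joined string "envID,classID,image,tag", so the /tasks/pullEnv handler would
// presumably recover the fields with something like
//   String[] parts = request.getReader().readLine().split(",");
// where parts[0] is the datastore key of the "Environment" entity created by this servlet.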
|
package com.herocc.bukkit.core.api;
import com.herocc.bukkit.core.Core;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
public class FreezeAPI {
private final Core plugin;
public FreezeAPI(Core plugin) { this.plugin = plugin; }
PotionEffect noJump = PotionEffectType.JUMP.createEffect(999999, 128); // amplifier 128 overflows to a negative jump boost, preventing jumping
public static List<UUID> frozen = new ArrayList<>();
public void unfreezePlayer(Player player){
player.setWalkSpeed(0.2F);
player.removePotionEffect(PotionEffectType.JUMP);
frozen.remove(player.getUniqueId());
}
public void freezePlayer(Player player){
player.setWalkSpeed(0);
player.addPotionEffect(noJump);
frozen.add(player.getUniqueId());
}
public void unfreezeAll(Boolean alert) {
if (frozen.size() != 0) {
plugin.getLogger().info("Unfreezing all players...");
// Iterate over a copy: unfreezePlayer() removes entries from the frozen list,
// which would otherwise cause a ConcurrentModificationException mid-loop.
for (UUID uuid : new ArrayList<>(frozen)) {
Player player = plugin.getServer().getPlayer(uuid);
if (player == null) {
continue; // player is no longer online; nothing to restore
}
unfreezePlayer(player);
plugin.getLogger().fine("Unfroze " + player.getDisplayName());
if (alert) {
player.sendMessage(ChatColor.GREEN + "All players are now unfrozen!");
}
}
plugin.getLogger().info("All players unfrozen!");
}
}
}
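// Illustrative usage sketch (assumption, not part of the original source): a command or event
// handler holding the plugin instance could toggle movement like
//   FreezeAPI freezeApi = new FreezeAPI(plugin);
//   freezeApi.freezePlayer(player);   // walk speed 0 plus the high-amplifier jump effect
//   freezeApi.unfreezePlayer(player); // restores the default 0.2F walk speed
// plugin and player are hypothetical local variables.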
|
package com.javatao.jkami.support;
import java.io.Serializable;
import java.sql.Array;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.javatao.jkami.ContextBeanHolder;
import com.javatao.jkami.JkException;
import com.javatao.jkami.Page;
import com.javatao.jkami.RunConfing;
import com.javatao.jkami.jdbc.BeanListHandle;
import com.javatao.jkami.jdbc.JdbcTypesUtils;
import com.javatao.jkami.jdbc.MapListHandle;
import com.javatao.jkami.jdbc.NumberHandle;
import com.javatao.jkami.jdbc.ResultHandle;
import com.javatao.jkami.utils.FKParse;
import com.javatao.jkami.utils.JkBeanUtils;
import com.javatao.jkami.utils.SqlUtils;
/**
 * Lightweight JDBC data-access helper: binds parameters, runs queries and updates,
 * and builds database-specific pagination SQL.
 *
 * @author tao
 */
public class DataMapper {
private static DataMapper mapper;
private static final Log logger = LogFactory.getLog(DataMapper.class);
private static final String DEFAULT_BEAN = "this";
private static final String EMPTY = "";
private static final String SEMICOLON = ";";
public static final String DATABSE_TYPE_MYSQL = "mysql";
public static final String DATABSE_TYPE_POSTGRE = "postgresql";
public static final String DATABSE_TYPE_ORACLE = "oracle";
public static final String DATABSE_TYPE_SQLSERVER = "sqlserver";
/**
 * Pagination SQL templates, chosen by database type; {0} is the original query.
 */
// mysql: limit beginNum,rows
private static final String MYSQL_SQL = "select * from ( {0} ) page_kami_tab limit ?,?";
// postgresql: limit rows offset beginNum
private static final String POSTGRE_SQL = "select * from ( {0} ) page_kami_tab limit ? offset ?";
// oracle: rownum <= endIndex and rownum_ > beginNum
private static final String ORACLE_SQL = "select * from (select row_.*,rownum rownum_ from ({0}) row_ where rownum <= ?) where rownum_> ? ";
private static final String SQLSERVER_SQL = "select * from ( select row_number() over(order by tempColumn) tempRowNumber, * from (select top ? tempColumn = 0, {0} ) t ) tt where tempRowNumber > ? "; // sqlserver
private static final String COUNT_SQL = "select count(1) from ( {0} ) conut_kami_tab"; // count_sql
// oracle
private static final String SEQUENCE_SQL = "select {0}.nextval from dual"; // sequence_sql
private static final Pattern pat = Pattern.compile(":[ tnx0Bfr]*[a-z.A-Z]+");
private static final String numberRegex = "^:\\d+$";
/**
 * Obtain a Connection from the current run configuration.
 *
 * @return Connection
 */
private Connection getCon() {
return RunConfing.getConfig().getConnection();
}
/**
 * Release a connection back to the current run configuration.
 *
 * @param con
 * Connection to release
 */
private void doReleaseConnection(Connection con) {
RunConfing.getConfig().doReleaseConnection(con);
}
/**
 * Return the shared DataMapper instance, creating it lazily.
 *
 * @return DataMapper
 */
public static DataMapper getMapper() {
if (mapper == null) {
mapper = new DataMapper();
}
return mapper;
}
/**
 * Bind positional parameters onto a PreparedStatement; a single Collection argument is
 * expanded into individual parameters.
 *
 * @param ps
 * PreparedStatement
 * @param args
 * parameter values, bound in order starting at index 1
 * @throws SQLException
 * SQLException
 */
private void setPSValue(PreparedStatement ps, Object... args) throws SQLException {
if (args != null && args.length > 0) {
if (args[0] instanceof Collection) {
args = ((Collection<?>) args[0]).toArray();
}
for (int i = 0; i < args.length; i++) {
Object value = args[i];
int index = i + 1;
psValue(ps, index, value);
if (logger.isDebugEnabled()) {
logger.debug("params[" + i + ":" + value + "]");
}
}
}
}
/**
 * Bind every mapped field of the entity onto the PreparedStatement, in mapping order.
 *
 * @param ps
 * PreparedStatement
 * @param entity
 * entity whose field values are bound
 * @return number of parameters bound
 */
private int setPSEntityValue(PreparedStatement ps, Object entity) {
try {
Class<?> classType = entity.getClass();
List<String> fileds = SqlUtils.getEntityAttrMp(classType);
for (int i = 0; i < fileds.size(); i++) {
String fd = fileds.get(i);
Object value = JkBeanUtils.getPropertyValue(entity, fd);
int index = i + 1;
if (value == null) {
ps.setNull(index, JdbcTypesUtils.getJdbcType(value));
} else {
psValue(ps, index, value);
}
if (logger.isDebugEnabled()) {
logger.debug("params[" + i + ":" + value + "]");
}
}
return fileds.size();
} catch (Exception e) {
throw new JkException(e);
}
}
/**
 * Bind a single value, converting Date to Timestamp, Collection to a SQL array and
 * enums to their String form.
 *
 * @param ps
 * PreparedStatement
 * @param index
 * 1-based parameter index
 * @param value
 * value to bind
 * @throws SQLException
 * on binding failure
 */
private void psValue(PreparedStatement ps, int index, Object value) throws SQLException {
if (value instanceof Date) {
ps.setTimestamp(index, new java.sql.Timestamp(((Date) value).getTime()));
} else if (value instanceof Collection) {
Collection<?> coll = (Collection<?>) value;
Connection conn = RunConfing.getConfig().getConnection();
String jdbcType = JdbcTypesUtils.getJdbcType(coll);
Array array = conn.createArrayOf(jdbcType, coll.toArray());
ps.setArray(index, array);
} else if (value!=null&&value.getClass().isEnum()) {
ps.setString(index, value.toString());
} else {
ps.setObject(index, value);
}
}
/**
 * Insert the entity; the generated (or Oracle sequence) key is written back onto it.
 *
 * @param o
 * entity to insert
 * @return number of affected rows
 */
public int save(Object o) {
RunConfing config = RunConfing.getConfig();
Connection con = config.getConnection();
Class<?> classType = o.getClass();
String sql = SqlUtils.getSqls(classType, SqlUtils.TYPE.INSERT);
try {
Object[] key = getTableKey(classType);
Object idValue = JkBeanUtils.getPropertyValue(o, (String) key[0]);
String dbType = config.getDbType();
if (idValue == null) {
if (dbType.contains(DATABSE_TYPE_ORACLE)) {
String s = SqlUtils.getSequenceGeneratorVal(classType);
if (s != null) {
String sequenceSql = MessageFormat.format(SEQUENCE_SQL, s);
idValue = query(sequenceSql, new NumberHandle<Long>());
if (idValue != null) {
JkBeanUtils.setProperty(o, (String) key[1], idValue);
}
}
}
}
PreparedStatement ps = con.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
setPSEntityValue(ps, o);
int n = ps.executeUpdate();
if (idValue == null) {
ResultSet rs = ps.getGeneratedKeys();
ResultHandle<Long> handle = new NumberHandle<Long>();
idValue = handle.handle(rs);
JkBeanUtils.setProperty(o, (String) key[1], idValue);
}
ps.close();
return n;
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Query for a single mapped object; throws if more than one row matches.
 *
 * @param <T>
 * result type
 * @param sql
 * sql
 * @param result
 * class to map the row to
 * @param currentDepth
 * current relation-loading depth
 * @param maxDepth
 * maximum relation-loading depth
 * @param params
 * query parameters
 * @return the single result, or null when no row matches
 */
public <T> T queryForObject(String sql, Class<T> result, int currentDepth, int maxDepth, Object... params) {
List<T> list = query(sql, new BeanListHandle<>(result, currentDepth, maxDepth), params);
if (list != null) {
if (list.size() > 1) {
throw new JkException("queryForObject size = " + list.size() + " not one");
}
if (list.size() == 1) {
return list.get(0);
}
}
return null;
}
/**
 * Execute a query and hand the ResultSet to the given handler.
 *
 * @param <E>
 * handler result type
 * @param sql
 * sql
 * @param handle
 * ResultSet handler
 * @param params
 * query parameters
 * @return handler result
 */
public <E> E query(String sql, ResultHandle<E> handle, Object... params) {
Connection con = getCon();
try {
if (logger.isDebugEnabled()) {
logger.debug(sql);
}
PreparedStatement ps = con.prepareStatement(sql);
setPSValue(ps, params);
ResultSet rs = ps.executeQuery();
E rows = handle.handle(rs);
ps.close();
return rows;
} catch (Exception e) {
throw new JkException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Find a single entity matching all of the given column/value pairs.
 *
 * @param <T>
 * entity type
 * @param result
 * entity class
 * @param sqlParameter
 * field (or column) name to value map used to build the where clause
 * @return the matching entity, or null
 */
public <T> T findOne(Class<T> result, Map<String, Object> sqlParameter) {
Connection con = getCon();
StringBuilder sb = new StringBuilder(SqlUtils.getSqls(result, SqlUtils.TYPE.SELECT));
try {
sb.append(" where 1=1 ");
Map<String, String> filedMap = SqlUtils.getEntityFiledColumnMap(result);
List<Object> values = new ArrayList<>();
for (String ky : sqlParameter.keySet()) {
String col = filedMap.get(ky);
if (col == null) {
col = ky;
}
sb.append(" and " + col + " = ? ");
values.add(sqlParameter.get(ky));
}
String sql = sb.toString();
int maxDepth = JkBeanUtils.getMaxDepth(result);
return queryForObject(sql, result, 1, maxDepth, values);
} catch (Exception e) {
throw new JkException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Execute a query and return each row as a column-name to value map.
 *
 * @param sql
 * sql
 * @param params
 * query parameters
 * @return list of row maps
 */
public List<Map<String, Object>> queryForMap(String sql, Object... params) {
return query(sql, new MapListHandle(), params);
}
/**
 * Find an entity by its primary key.
 *
 * @param <T>
 * entity type
 * @param id
 * primary key value
 * @param classType
 * entity class
 * @return the entity, or null if not found
 */
public <T> T findById(Serializable id, Class<T> classType) {
Connection con = getCon();
String sql = SqlUtils.getSqls(classType, SqlUtils.TYPE.SELECT);
try {
Object[] key = getTableKey(classType);
sql += " where " + key[1] + " = ? ";
int maxDepth = JkBeanUtils.getMaxDepth(classType);
return queryForObject(sql, classType, 1, maxDepth, id);
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Update all mapped columns of the entity, keyed by its primary key.
 *
 * @param <T>
 * entity type
 * @param o
 * entity carrying the new values and the key
 * @return number of affected rows
 */
public <T> int updateById(T o) {
Class<?> classType = o.getClass();
String sql = SqlUtils.getSqls(classType, SqlUtils.TYPE.UPDATE);
Object[] key = getTableKey(classType);
sql += " where " + key[1] + " = ? ";
Connection con = getCon();
try {
if (logger.isDebugEnabled()) {
logger.debug(sql);
}
PreparedStatement ps = con.prepareStatement(sql);
Object kv = JkBeanUtils.getPropertyValue(o, (String) key[0]);
int psSize = setPSEntityValue(ps, o);
ps.setObject(psSize + 1, kv);
int n = ps.executeUpdate();
ps.close();
return n;
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Execute a single insert/update/delete statement.
 *
 * @param sql
 * sql
 * @param params
 * statement parameters
 * @return number of affected rows
 */
public int executeUpdate(String sql, Object... params) {
Connection con = getCon();
try {
if (logger.isDebugEnabled()) {
logger.debug(sql);
}
PreparedStatement ps = con.prepareStatement(sql);
setPSValue(ps, params);
int n = ps.executeUpdate();
ps.close();
return n;
} catch (Exception e) {
throw new JkException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Execute one or more semicolon-separated statements; a single statement is delegated to
 * executeUpdate, otherwise the statements are run as a JDBC batch.
 *
 * @param sqls
 * one or more sql statements separated by ';'
 * @param params
 * parameters (used only for the single-statement case)
 * @return update count, or the batch result array
 */
public Object executeBatchUpdate(String sqls, Object... params) {
Connection con = getCon();
try {
if (sqls.indexOf(SEMICOLON) == -1) {
return executeUpdate(sqls, params);
}
String[] sqlss = sqls.replaceAll("\r|\n", EMPTY).split(SEMICOLON);
int length = sqlss.length;
int runsize = sqlss.length;
Statement st = con.createStatement();
for (int i = 0; i < length; i++) {
String sql = sqlss[i];
if (sql == null || sql.isEmpty() || sql.trim().isEmpty()) {
runsize--; // skip blank fragments left by the split
continue;
}
if (logger.isDebugEnabled()) {
logger.debug("addBatch: " + sql);
}
st.addBatch(sql);
}
if (logger.isDebugEnabled()) {
logger.debug("executeBatchUpdate size: " + runsize);
}
int[] batch = st.executeBatch();
st.close();
return batch;
} catch (Exception e) {
throw new JkException(e);
} finally {
doReleaseConnection(con);
}
}
/**
 * Delete an entity by its primary key. <br>
 * delete from table where id=?
 *
 * @param <T>
 * entity type
 * @param id
 * primary key value
 * @param classType
 * entity class
 * @return number of affected rows
 */
public <T> int deleteById(Serializable id, Class<T> classType) {
Object[] key = SqlUtils.getTableKey(classType);
StringBuilder sb = new StringBuilder("delete from ");
sb.append(SqlUtils.getTableName(classType));
sb.append(" where " + key[1] + " = ? ");
String sql = sb.toString();
return executeUpdate(sql, id);
}
/**
 * Update only the non-null fields of the entity, keyed by its primary key.
 *
 * @param <T>
 * entity type
 * @param o
 * entity carrying the new values and the key
 * @return number of affected rows
 */
public <T> int updateNotNullById(T o) {
Class<?> classType = o.getClass();
StringBuilder sb = new StringBuilder("update ");
sb.append(SqlUtils.getTableName(classType));
sb.append(" set ");
List<String> attrs = SqlUtils.getEntityAttrMp(classType);
Map<String, String> filedMap = SqlUtils.getEntityFiledColumnMap(classType);
List<Object> values = new ArrayList<>();
boolean isFirst = true;
for (int i = 0; i < attrs.size(); i++) {
String attr = attrs.get(i);
Object value = JkBeanUtils.getPropertyValue(o, attr);
if (value != null) {
if (!isFirst) {
sb.append(",");
}
sb.append(filedMap.get(attr) + " = ? ");
values.add(value);
isFirst = false;
}
}
Object[] key = getTableKey(classType);
sb.append(" where " + key[1] + " = ? ");
Object kv = JkBeanUtils.getPropertyValue(o, (String) key[0]);
values.add(kv);
String sql = sb.toString();
return executeUpdate(sql, values);
}
/**
 * Run a paged query: executes the page-limited select plus a matching count query.
 *
 * @param <T>
 * entity type
 * @param sql
 * base sql; when null the generated select for classType is used
 * @param classType
 * entity class rows are mapped to
 * @param page
 * page request (page number and size); created when null
 * @param params
 * query parameters; the paging values are appended internally
 * @return the page populated with rows and total count
 */
public <T> Page<T> findPage(String sql, Class<T> classType, Page<T> page, List<Object> params) {
if (page == null) {
page = new Page<>();
}
if (sql == null) {
sql = SqlUtils.getSelectSqls(classType);
}
RunConfing config = RunConfing.getConfig();
String dbType = config.getDbType();
if (params == null) {
params = new ArrayList<>();
}
String sqlParam = SqlUtils.getSearchParames(classType, page, params);
sql += sqlParam;
String pageSql = createPageSql(dbType, sql, page.getPage(), page.getSize(), params);
String countSql = MessageFormat.format(COUNT_SQL, sql);
int maxDepth = JkBeanUtils.getMaxDepth(classType);
List<T> result = query(pageSql, new BeanListHandle<>(classType, 1, maxDepth), params);
params.remove(params.size() - 1);
params.remove(params.size() - 1);
Long total = query(countSql, new NumberHandle<>(), params);
page.setRows(result);
page.setTotal(total);
return page;
}
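// Illustrative usage sketch (assumption, not part of the original source):
//   Page<Car> page = new Page<>();
//   page.setPage(3);
//   page.setSize(10);
//   page = DataMapper.getMapper().findPage(null, Car.class, page, null);
// Car is a hypothetical mapped entity and setPage/setSize are assumed setters matching the
// getPage()/getSize() accessors used above; passing a null SQL falls back to the generated
// select for the entity class.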
/**
 * Wrap the given SQL in the pagination template for the database type and append the
 * paging values to params.
 *
 * @param dbType
 * database type (mysql, postgresql, oracle, sqlserver)
 * @param sql
 * sql
 * @param page
 * 1-based page number
 * @param rows
 * page size
 * @param params
 * parameter list the paging values are appended to
 * @return paged sql
 */
public static String createPageSql(String dbType, String sql, int page, int rows, List<Object> params) {
int beginNum = (page - 1) * rows;
if (JkBeanUtils.isBlank(dbType)) {
throw new RuntimeException("Database type (dbType) is not configured");
}
if (dbType.indexOf(DATABSE_TYPE_MYSQL) > -1) {
sql = MessageFormat.format(MYSQL_SQL, sql);
params.add(beginNum);
params.add(rows);
} else if (dbType.indexOf(DATABSE_TYPE_POSTGRE) > -1) {
sql = MessageFormat.format(POSTGRE_SQL, sql);
params.add(rows);
params.add(beginNum);
} else {
int endIndex = beginNum + rows;
params.add(endIndex);
params.add(beginNum);
if (dbType.indexOf(DATABSE_TYPE_ORACLE) > -1) {
sql = MessageFormat.format(ORACLE_SQL, sql);
} else if (dbType.indexOf(DATABSE_TYPE_SQLSERVER) > -1) {
sql = MessageFormat.format(SQLSERVER_SQL, sql);
}
}
return sql;
}
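// Worked example (illustrative, not part of the original source): for dbType "mysql",
// sql "select * from t_car" (a hypothetical table), page=3 and rows=10, beginNum is 20 and
// the method returns
//   select * from ( select * from t_car ) page_kami_tab limit ?,?
// while appending 20 and 10 to params, which setPSValue later binds in that order.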
/**
 * Replace named ":param" placeholders in the SQL with '?' and collect the matching values
 * from the parameter map, in match order.
 *
 * @param executeSql
 * sql containing :name placeholders and optional ${...} template expressions
 * @param sqlParamsMap
 * named parameter values
 * @param result
 * list that receives the positional values
 * @param k
 * optional key prefix prepended to each matched name
 * @return sql with the placeholders replaced by '?'
 */
public String placeholderSqlParam(String executeSql, Map<String, Object> sqlParamsMap, List<Object> result, String... k) {
String key = EMPTY;
if (k != null && k.length > 0) {
key = k[0];
}
// sql ${...}
if (sqlParamsMap != null && !sqlParamsMap.isEmpty()) {
executeSql = FKParse.parseTemplateContent(executeSql, sqlParamsMap);
}
Matcher m = pat.matcher(executeSql);
while (m.find()) {
String match = m.group();
if (match.matches(numberRegex)) {
continue;
}
executeSql = executeSql.replace(match, "?");
match = match.replace(":", key).trim();
Object val = sqlParamsMap.get(match);
if (val == null) {
val = FKParse.parseTemplateContent("${" + match + "}", sqlParamsMap);
}
result.add(val);
if (logger.isDebugEnabled()) {
logger.debug(" Match [" + match + "] at positions " + m.start() + "-" + (m.end() - 1) + " value:" + val);
}
}
return executeSql;
}
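// Illustrative example (not part of the original source): given
//   executeSql = "select * from t_car where city = :city and price < :maxPrice"
// and sqlParamsMap = {city=Beijing, maxPrice=100000} (hypothetical values), the matcher rewrites
// the SQL to "select * from t_car where city = ? and price < ?" and appends "Beijing" and 100000
// to result in match order; placeholders of the form ":123" are skipped because of numberRegex.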
/**
 * Convenience overload: exposes the given value as the "this" bean so placeholders such as
 * ":this.name" can be resolved against it.
 *
 * @param executeSql
 * sql containing :name placeholders
 * @param result
 * list that receives the positional values
 * @param value
 * bean backing the "this" prefix
 * @return sql with the placeholders replaced by '?'
 */
public String placeholderSqlParam(String executeSql, List<Object> result, Object value) {
Map<String, Object> sqlParamsMap = ContextBeanHolder.getSqlParams();
sqlParamsMap.put(DEFAULT_BEAN, value);
return placeholderSqlParam(executeSql, sqlParamsMap, result, DEFAULT_BEAN + ".");
}
/**
 * Resolve the primary-key metadata for an entity class.
 *
 * @param classType
 * entity class
 * @return key metadata:
 * [0] field name
 * [1] column name
 * [2] type
 */
private Object[] getTableKey(Class<?> classType) {
Object[] key = SqlUtils.getTableKey(classType);
if (key == null) {
throw new RuntimeException(classType.getName() + " has no @key annotation");
}
return key;
}
}
|
package com.libqa.web.domain;
/**
* @Author : yion
* @Date : 2015. 10. 11.
* @Description :
*/
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.extern.slf4j.Slf4j;
import org.springframework.security.web.authentication.rememberme.PersistentRememberMeToken;
import javax.persistence.*;
import java.util.Date;
/**
 * @Author : yion
 * @Date : 2015. 9. 14.
 * @Description : JPA entity backing Spring Security's persistent remember-me tokens.
 */
@Data
@Entity
@Table(name = "persistent_login")
@Slf4j
@EqualsAndHashCode
public class PersistentLogin {
@Id
@Column(length = 64, nullable = false)
private String series;
@Column(length = 64, nullable = false)
private String username;
@Column(length = 64, nullable = false)
private String token;
@Temporal(TemporalType.TIMESTAMP)
@Column(nullable = false)
private Date lastUsed;
public PersistentLogin(PersistentRememberMeToken persistentRememberMeToken) {
this.username = persistentRememberMeToken.getUsername();
this.series = persistentRememberMeToken.getSeries();
this.lastUsed = persistentRememberMeToken.getDate();
this.token = persistentRememberMeToken.getTokenValue();
}
// JPA requires a no-argument constructor; the explicit constructor above suppresses Lombok's.
protected PersistentLogin() {
}
}
|
package com.logentries.jul;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.StandardSocketOptions;
import java.nio.BufferOverflowException;
import java.nio.ByteBuffer;
import java.nio.channels.SocketChannel;
import java.nio.charset.Charset;
import java.text.MessageFormat;
import static java.util.logging.ErrorManager.CLOSE_FAILURE;
import static java.util.logging.ErrorManager.FORMAT_FAILURE;
import static java.util.logging.ErrorManager.GENERIC_FAILURE;
import static java.util.logging.ErrorManager.OPEN_FAILURE;
import static java.util.logging.ErrorManager.WRITE_FAILURE;
import java.util.logging.Formatter;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogManager;
import java.util.logging.LogRecord;
import java.util.logging.SimpleFormatter;
public final class LogentriesHandler extends Handler {
private String host;
private int port;
private byte[] token;
private boolean open;
private SocketChannel channel;
private ByteBuffer buffer;
private final byte[] newline = {0x0D, 0x0A};
private final byte space = 0x020;
public LogentriesHandler() {
configure();
connect();
buffer = ByteBuffer.allocate(1024);
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public byte[] getToken() {
return token;
}
public void setToken(byte[] token) {
this.token = token;
}
@Override
public synchronized void publish(LogRecord record) {
if (open && isLoggable(record)) {
String msg = formatMessage(record);
if (!msg.isEmpty()) {
boolean filled = fillAndFlip(msg);
if (filled) {
boolean drained = drain();
if (!drained) {
System.err.println("java.util.logging.ErrorManager: Sending to logentries.com failed. Trying to reconnect once.");
connect();
if (open) {
filled = fillAndFlip(msg);
if (filled) {
drained = drain();
if (!drained) {
System.err.println("java.util.logging.ErrorManager: Unable to reconnect. Shutting handler down.");
close();
}
}
}
}
}
}
}
}
String formatMessage(LogRecord record) {
String msg = "";
try {
msg = getFormatter().format(record);
// replace line separators with unicode equivalent
msg = msg.replace(System.getProperty("line.separator"), "\u2028");
} catch (Exception e) {
reportError("Error while formatting.", e, FORMAT_FAILURE);
}
return msg;
}
boolean fillAndFlip(String formattedMessage) {
try {
buffer.clear();
buffer.put(token);
buffer.put(space);
buffer.put(formattedMessage.getBytes(Charset.forName("UTF-8")));
buffer.put(newline);
} catch (BufferOverflowException e) {
reportError("Buffer exceeds capacity", e, WRITE_FAILURE);
return false;
}
buffer.flip();
return true;
}
boolean drain() {
while (buffer.hasRemaining()) {
try {
channel.write(buffer);
} catch (Exception e) {
reportError("Error while writing channel.", e, WRITE_FAILURE);
return false;
}
}
return true;
}
void configure() {
String cname = getClass().getName();
setLevel(getLevelProperty(cname + ".level", Level.INFO));
setFormatter(getFormatterProperty(cname + ".formatter", new SimpleFormatter()));
setHost(getStringProperty(cname + ".host", "data.logentries.com"));
setPort(getIntProperty(cname + ".port", 514));
setToken(getBytesProperty(cname + ".token", ""));
}
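// Illustrative logging.properties sketch (not part of the original source), using the property
// keys read above:
//   handlers = com.logentries.jul.LogentriesHandler
//   com.logentries.jul.LogentriesHandler.level = INFO
//   com.logentries.jul.LogentriesHandler.host = data.logentries.com
//   com.logentries.jul.LogentriesHandler.port = 514
//   com.logentries.jul.LogentriesHandler.token = <your-logentries-token>
//   com.logentries.jul.LogentriesHandler.formatter = java.util.logging.SimpleFormatter
// The token value is a placeholder; the other values mirror the defaults in configure().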
void connect() {
try {
channel = SocketChannel.open();
channel.setOption(StandardSocketOptions.SO_KEEPALIVE, true);
channel.connect(new InetSocketAddress(host, port));
open = true;
} catch (IOException e) {
open = false;
reportError(MessageFormat.format("Error connecting to host: {0}:{1}", host, port), e, OPEN_FAILURE);
}
}
@Override
public void flush() {}
@Override
public void close() throws SecurityException {
open = false;
buffer = null;
if (channel != null) {
try {
channel.close();
} catch (IOException e) {
reportError("Error while closing channel.", e, CLOSE_FAILURE);
}
}
}
// -- These methods are private in LogManager
Level getLevelProperty(String name, Level defaultValue) {
LogManager manager = LogManager.getLogManager();
String val = manager.getProperty(name);
if (val == null) {
return defaultValue;
}
Level l = Level.parse(val.trim());
return l != null ? l : defaultValue;
}
Formatter getFormatterProperty(String name, Formatter defaultValue) {
LogManager manager = LogManager.getLogManager();
String val = manager.getProperty(name);
try {
if (val != null) {
ClassLoader cl = Thread.currentThread().getContextClassLoader();
Class<?> clz = cl.loadClass(val);
return (Formatter) clz.newInstance();
}
} catch (ReflectiveOperationException e) {
reportError(MessageFormat.format("Error reading property ''{0}''", name), e, GENERIC_FAILURE);
}
return defaultValue;
}
String getStringProperty(String name, String defaultValue) {
LogManager manager = LogManager.getLogManager();
String val = manager.getProperty(name);
if (val == null) {
return defaultValue;
}
return val.trim();
}
byte[] getBytesProperty(String name, String defaultValue) {
return getStringProperty(name, defaultValue).getBytes();
}
int getIntProperty(String name, int defaultValue) {
LogManager manager = LogManager.getLogManager();
String val = manager.getProperty(name);
if (val == null) {
return defaultValue;
}
try {
return Integer.parseInt(val.trim());
} catch (NumberFormatException e) {
reportError(MessageFormat.format("Error reading property ''{0}''", name), e, GENERIC_FAILURE);
return defaultValue;
}
}
}
|
/**
* This file was automatically generated by the Mule Development Kit
*/
package com.mulesoft.modules.cors;
import org.apache.commons.lang.StringUtils;
import org.mule.api.MuleEvent;
import org.mule.api.annotations.param.Default;
import org.mule.api.annotations.param.Optional;
import org.mule.api.store.ObjectStore;
import org.mule.api.store.ObjectStoreException;
import org.mule.api.store.ObjectStoreManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.mule.api.MuleMessage;
import org.mule.api.annotations.*;
import org.mule.api.callback.SourceCallback;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.List;
/**
* Cloud Connector
*
* @author MuleSoft, Inc.
*/
@Module(name="cors", schemaVersion="1.0-SNAPSHOT")
public class CORSModule
{
private static final Logger logger = LoggerFactory.getLogger(CORSModule.class);
@Inject
private ObjectStoreManager objectStoreManager;
/**
* The initial list of supported origins that will be introduced into the origins object store.
*/
@Configurable
@Optional
private List<Origin> origins;
/**
* The object store used for storing the origins.
*/
@Configurable
@Optional
private ObjectStore<Origin> originsStore;
@PostConstruct
public void initializeModule() throws ObjectStoreException{
//no object store configured.
if (this.originsStore == null) {
if (logger.isDebugEnabled()) logger.debug("No object store configured, defaulting to " + Constants.ORIGINS_OBJECT_STORE);
this.originsStore = objectStoreManager.getObjectStore(Constants.ORIGINS_OBJECT_STORE);
}
//setup all configured object stores.
if (this.origins == null) {
if (logger.isDebugEnabled()) logger.debug("No initial set of origins configured.");
return;
}
for(Origin o : origins) {
if (logger.isDebugEnabled()) {
logger.debug("Configuring origin: " + o.getUrl());
}
originsStore.store(o.getUrl(), o);
}
}
/**
* Perform CORS validation. This operation will add the necessary CORS headers to the response. If the request method
* is OPTIONS it will not perform further processing of the message.
*
* If this request is not a CORS request, then the processing will continue without altering the message.
*
* {@sample.xml ../../../doc/CORSModule-connector.xml.sample cors:validate}
*
* @param callback the source callback for continuing the execution.
* @param event the mule event.
* @param publicResource specifies if this resource should be publicly available regardless of the origin.
* @param acceptsCredentials specifies whether the resource accepts credentials or not.
* @return the resulting event
* @throws Exception propagate any exception thrown by next message processors.
*/
@Processor(intercepting = true)
@Inject
public MuleMessage validate(SourceCallback callback, MuleEvent event, @Optional @Default("false")
boolean publicResource, @Optional @Default("false") boolean acceptsCredentials) throws Exception {
if (publicResource && acceptsCredentials) {
throw new IllegalArgumentException("Resource may not be public and accept credentials at the same time");
}
MuleMessage message = event.getMessage();
//read the origin
String origin = message.getInboundProperty(Constants.ORIGIN);
//if origin is not present, then not a CORS request
if (StringUtils.isEmpty(origin)) {
if (logger.isDebugEnabled()) logger.debug("Request is not a CORS request.");
return callback.processEvent(event).getMessage();
}
//read headers including those of the preflight
String method = message.getInboundProperty(Constants.HTTP_METHOD);
String requestMethod = message.getInboundProperty(Constants.REQUEST_METHOD);
String requestHeaders = message.getInboundProperty(Constants.REQUEST_HEADERS);
MuleEvent result = event;
//decide if we want to invoke the flow.
if (shouldInvokeFlow(origin, method, publicResource)) {
result = callback.processEvent(result);
} else {
//setting the response to null.
event.getMessage().setPayload(null);
}
//finally configure the CORS headers
configureCorsHeaders(event.getMessage(), method, origin, requestMethod, requestHeaders, publicResource, acceptsCredentials);
return result.getMessage();
}
private void configureCorsHeaders(MuleMessage message, String method, String origin, String requestMethod,
String requestHeaders, boolean publicResource, boolean acceptsCredentials) throws ObjectStoreException {
boolean isPreflight = StringUtils.equals(Constants.PREFLIGHT_METHOD, method);
//if the resource is public then we don't check
if (publicResource) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_ORIGIN, "*");
//and if it is a preflight call
if (isPreflight) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_METHODS, requestMethod);
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_HEADERS, requestHeaders);
}
//no further processing
return;
}
//if origin is not present then don't add headers
if (!originsStore.contains(origin)) {
return;
}
Origin configuredOrigin = originsStore.retrieve(origin);
String checkMethod = isPreflight ? requestMethod : method;
//if the method is not present, then we don't allow.
if (configuredOrigin.getMethods() == null || !configuredOrigin.getMethods().contains(checkMethod)) {
return;
}
//add the allow origin
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_ORIGIN, origin);
//if the resource accepts credentials
if (acceptsCredentials) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
//if this is not a preflight, then we don't want to add the other headers
if (!isPreflight) {
return;
}
//serialize the list of allowed methods
if (configuredOrigin.getMethods() != null) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_METHODS, StringUtils.join(configuredOrigin.getMethods(), ", "));
}
//serialize the list of allowed headers
if (configuredOrigin.getHeaders() != null) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_ALLOW_HEADERS, StringUtils.join(configuredOrigin.getHeaders(), ", "));
}
//serialize the list of exposed headers
if (configuredOrigin.getExposeHeaders() != null) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_EXPOSE_HEADERS, StringUtils.join(configuredOrigin.getExposeHeaders(), ", "));
}
//set the configured max age for this origin
if (configuredOrigin.getAccessControlMaxAge() != null) {
message.setOutboundProperty(Constants.ACCESS_CONTROL_MAX_AGE, configuredOrigin.getAccessControlMaxAge());
}
}
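// Illustrative outcome (not part of the original source): for a preflight (OPTIONS) request from
// a configured, non-public origin whose allowed methods contain the requested method, the code
// above sets outbound properties roughly equivalent to
//   Access-Control-Allow-Origin: http://example.org   (made-up example origin)
//   Access-Control-Allow-Methods: GET, POST
//   Access-Control-Allow-Headers: <the origin's configured headers, comma-joined>
// plus Access-Control-Max-Age when the origin configures one.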
private boolean shouldInvokeFlow(String origin, String method, boolean publicResource) throws ObjectStoreException {
//if it is the preflight request, then the flow logic won't be invoked.
if (StringUtils.equals(Constants.PREFLIGHT_METHOD, method)) {
if (logger.isDebugEnabled()) logger.debug("OPTIONS header, will not continue processing.");
return false;
}
//if it is a public resource and not preflight, then let's do it :)
if (publicResource) {
return true;
}
if (!originsStore.contains(origin)) {
if (logger.isDebugEnabled()) logger.debug("Origin not configured: " + origin);
return false;
}
//verify the allowed methods.
Origin configuredOrigin = originsStore.retrieve(origin);
if (configuredOrigin.getMethods() != null) {
return configuredOrigin.getMethods().contains(method);
} else {
logger.warn("Configured origin has no methods. Not allowing the execution of the flow");
return false;
}
}
//GETTERS AND SETTERS
public ObjectStore<Origin> getOriginsStore() {
return originsStore;
}
public void setOriginsStore(ObjectStore<Origin> originsStore) {
this.originsStore = originsStore;
}
public List<Origin> getOrigins() {
return origins;
}
public void setOrigins(List<Origin> origins) {
this.origins = origins;
}
public ObjectStoreManager getObjectStoreManager() {
return objectStoreManager;
}
public void setObjectStoreManager(ObjectStoreManager objectStoreManager) {
this.objectStoreManager = objectStoreManager;
}
}
|
package com.ociweb.iot.hardware;
import static com.ociweb.iot.hardware.HardwareConnection.DEFAULT_AVERAGE_WINDOW_MS;
import java.util.Arrays;
import java.util.concurrent.locks.ReentrantLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ociweb.gl.api.Behavior;
import com.ociweb.gl.api.MsgCommandChannel;
import com.ociweb.gl.api.MsgRuntime;
import com.ociweb.gl.impl.BuilderImpl;
import com.ociweb.gl.impl.ChildClassScanner;
import com.ociweb.gl.impl.schema.IngressMessages;
import com.ociweb.gl.impl.schema.MessagePubSub;
import com.ociweb.gl.impl.schema.MessageSubscription;
import com.ociweb.gl.impl.schema.TrafficAckSchema;
import com.ociweb.gl.impl.schema.TrafficOrderSchema;
import com.ociweb.gl.impl.schema.TrafficReleaseSchema;
import com.ociweb.gl.impl.stage.TrafficCopStage;
import com.ociweb.iot.hardware.impl.DirectHardwareAnalogDigitalOutputStage;
import com.ociweb.iot.hardware.impl.SerialDataReaderStage;
import com.ociweb.iot.hardware.impl.SerialDataWriterStage;
import com.ociweb.iot.hardware.impl.SerialInputSchema;
import com.ociweb.iot.hardware.impl.SerialOutputSchema;
import com.ociweb.iot.hardware.impl.edison.EdisonConstants;
import com.ociweb.iot.impl.AnalogListenerBase;
import com.ociweb.iot.impl.DigitalListenerBase;
import com.ociweb.iot.impl.I2CListenerBase;
import com.ociweb.iot.impl.ImageListenerBase;
import com.ociweb.iot.impl.RotaryListenerBase;
import com.ociweb.iot.impl.SerialListenerBase;
import com.ociweb.iot.maker.Baud;
import com.ociweb.iot.maker.FogRuntime;
import com.ociweb.iot.maker.Hardware;
import com.ociweb.iot.maker.PiImageListenerStage;
import com.ociweb.iot.maker.Port;
import com.ociweb.iot.transducer.AnalogListenerTransducer;
import com.ociweb.iot.transducer.DigitalListenerTransducer;
import com.ociweb.iot.transducer.I2CListenerTransducer;
import com.ociweb.iot.transducer.ImageListenerTransducer;
import com.ociweb.iot.transducer.RotaryListenerTransducer;
import com.ociweb.iot.transducer.SerialListenerTransducer;
import com.ociweb.pronghorn.iot.ReactiveIoTListenerStage;
import com.ociweb.pronghorn.iot.ReadDeviceInputStage;
import com.ociweb.pronghorn.iot.i2c.I2CBacking;
import com.ociweb.pronghorn.iot.i2c.I2CJFFIStage;
import com.ociweb.pronghorn.iot.i2c.impl.I2CNativeLinuxBacking;
import com.ociweb.pronghorn.iot.rs232.RS232Client;
import com.ociweb.pronghorn.iot.rs232.RS232Clientable;
import com.ociweb.pronghorn.iot.schema.GroveRequestSchema;
import com.ociweb.pronghorn.iot.schema.GroveResponseSchema;
import com.ociweb.pronghorn.iot.schema.I2CCommandSchema;
import com.ociweb.pronghorn.iot.schema.I2CResponseSchema;
import com.ociweb.pronghorn.iot.schema.ImageSchema;
import com.ociweb.pronghorn.network.schema.ClientHTTPRequestSchema;
import com.ociweb.pronghorn.network.schema.NetPayloadSchema;
import com.ociweb.pronghorn.network.schema.NetResponseSchema;
import com.ociweb.pronghorn.pipe.Pipe;
import com.ociweb.pronghorn.pipe.PipeConfig;
import com.ociweb.pronghorn.pipe.util.hash.IntHashTable;
import com.ociweb.pronghorn.stage.route.ReplicatorStage;
import com.ociweb.pronghorn.stage.scheduling.GraphManager;
import com.ociweb.pronghorn.stage.test.PipeCleanerStage;
import com.ociweb.pronghorn.util.math.PMath;
import com.ociweb.pronghorn.util.math.ScriptedSchedule;
public abstract class HardwareImpl extends BuilderImpl implements Hardware {
private static final int MAX_MOVING_AVERAGE_SUPPORTED = 101; //TODO: is this still needed? remove?
private static final HardwareConnection[] EMPTY = new HardwareConnection[0];
protected boolean configCamera = false;
protected boolean configI2C; //Humidity, LCD need I2C address so..
protected long debugI2CRateLastTime;
protected HardwareConnection[] digitalInputs; //Button, Motion
protected HardwareConnection[] digitalOutputs;//Relay Buzzer
protected HardwareConnection[] analogInputs; //Light, UV, Moisture
protected HardwareConnection[] pwmOutputs; //Servo //(only 3, 5, 6, 9, 10, 11 when on edison)
protected I2CConnection[] i2cInputs;
protected I2CConnection[] i2cOutputs;
private static final int DEFAULT_LENGTH = 16;
private static final int DEFAULT_PAYLOAD_SIZE = 128;
private static final boolean DEFAULT_EVERY_VALUE = false;
private int i2cBus;
protected I2CBacking i2cBackingInternal;
protected static final long MS_TO_NS = 1_000_000;
private static final Logger logger = LoggerFactory.getLogger(HardwareImpl.class);
protected final IODevice[] deviceOnPort= new IODevice[Port.values().length];
///Pipes for initial startup declared subscriptions. (Not part of graph)
private final int maxStartupSubs = 64;
private final int maxTopicLengh = 128;
private Pipe<MessagePubSub> tempPipeOfStartupSubscriptions;
protected ReentrantLock devicePinConfigurationLock = new ReentrantLock();
protected RS232Client rs232Client;
protected String rs232ClientDevice = "/dev/ttyMFD1";//custom hardware should override this edison value
protected Baud rs232ClientBaud = Baud.B_____9600;
protected String bluetoothDevice = null;
private static final boolean debug = false;
private int IDX_PIN = -1;
private int IDX_I2C = -1;
private int IDX_SER = -1;
private int imageFrameTriggerRateMillis = 33;
public void setImageTriggerRate(int triggerRateMillis) {
// TODO: No need for a minimum now? Test!
// if (triggerRateMillis < 1250) {
// throw new RuntimeException("Image listeners cannot be used with trigger rates of less than 1250 MS.");
this.imageFrameTriggerRateMillis = triggerRateMillis;
}
public IODevice getConnectedDevice(Port p) {
return deviceOnPort[p.ordinal()];
}
public HardwareImpl(GraphManager gm, String[] args, int i2cBus) {
this(gm, args, i2cBus, false, false, EMPTY,EMPTY,EMPTY,EMPTY,EMPTY);
}
protected HardwareImpl(GraphManager gm, String[] args, int i2cBus, boolean publishTime, boolean configI2C, HardwareConnection[] multiDigitalInput,
HardwareConnection[] digitalInputs, HardwareConnection[] digitalOutputs, HardwareConnection[] pwmOutputs, HardwareConnection[] analogInputs) {
super(gm, args);
ReactiveIoTListenerStage.initOperators(operators);
this.pcm.addConfig(new PipeConfig<NetPayloadSchema>(NetPayloadSchema.instance,
2, //only a few requests when FogLight
MINIMUM_TLS_BLOB_SIZE));
this.pcm.addConfig(new PipeConfig<SerialInputSchema>(SerialInputSchema.instance,
DEFAULT_LENGTH,
DEFAULT_PAYLOAD_SIZE));
this.i2cBus = i2cBus;
this.configI2C = configI2C; //may be removed.
this.digitalInputs = digitalInputs;
this.digitalOutputs = digitalOutputs;
this.pwmOutputs = pwmOutputs;
this.analogInputs = analogInputs;
this.getTempPipeOfStartupSubscriptions().initBuffers();
}
public I2CBacking getI2CBacking() {
if (null == i2cBackingInternal) {
i2cBackingInternal = getI2CBacking((byte)i2cBus, false);
}
return i2cBackingInternal;
}
private static I2CBacking getI2CBacking(byte deviceNum, boolean reportError) {
long start = System.currentTimeMillis();
try {
return new I2CNativeLinuxBacking().configure(deviceNum);
} catch (Throwable t) {
if (reportError) {
logger.info("warning could not find the i2c bus", t);
}
//avoid non error case that is used to detect which hardware is running.
return null;
} finally {
logger.info("duration of getI2CBacking {} ", System.currentTimeMillis()-start);
}
}
protected HardwareConnection[] growHardwareConnections(HardwareConnection[] original, HardwareConnection toAdd) {
final int len = original.length;
//Validate that what we are adding is safe
int i = len;
while (--i>=0) {
if (original[i].register == toAdd.register) {
throw new UnsupportedOperationException("This connection "+toAdd.register+" already has attachment "+original[i].twig+" so the attachment "+toAdd.twig+" can not be added.");
}
}
//Grow the array
HardwareConnection[] result = new HardwareConnection[len+1];
System.arraycopy(original, 0, result, 0, len);
result[len] = toAdd;
return result;
}
protected I2CConnection[] growI2CConnections(I2CConnection[] original, I2CConnection toAdd){
if (null==original) {
return new I2CConnection[] {toAdd};
} else {
int l = original.length;
I2CConnection[] result = new I2CConnection[l+1];
System.arraycopy(original, 0, result, 0, l);
result[l] = toAdd;
return result;
}
}
protected Hardware internalConnectAnalog(IODevice t, int connection, int customRate, int customAverageMS, boolean everyValue) {
if (t.isInput()) {
assert(!t.isOutput());
analogInputs = growHardwareConnections(analogInputs, new HardwareConnection(t,connection, customRate, customAverageMS, everyValue));
} else {
assert(t.isOutput());
pwmOutputs = growHardwareConnections(pwmOutputs, new HardwareConnection(t,connection, customRate, customAverageMS, everyValue));
}
return this;
}
protected Hardware internalConnectDigital(IODevice t, int connection, int customRate, int customAverageMS, boolean everyValue) {
if (t.isInput()) {
assert(!t.isOutput());
digitalInputs = growHardwareConnections(digitalInputs, new HardwareConnection(t,connection, customRate, customAverageMS, everyValue));
} else {
assert(t.isOutput());
digitalOutputs = growHardwareConnections(digitalOutputs, new HardwareConnection(t,connection, customRate, customAverageMS, everyValue));
}
return this;
}
@Override
public Hardware connect(I2CIODevice t){
logger.debug("Connecting I2C Device "+t.getClass());
if(t.isInput()){
i2cInputs = growI2CConnections(i2cInputs, t.getI2CConnection());
}
if(t.isOutput()){
i2cOutputs = growI2CConnections(i2cOutputs, t.getI2CConnection());
}
this.useI2C();
return this;
}
@Override
public Hardware connect(I2CIODevice t, int customRateMS){
logger.debug("Connecting I2C Device "+t.getClass());
if(t.isInput()){
i2cInputs = growI2CConnections(i2cInputs, new I2CConnection(t.getI2CConnection(),customRateMS));
}
if(t.isOutput()){
i2cOutputs = growI2CConnections(i2cOutputs, t.getI2CConnection());
}
this.useI2C();
return this;
}
public Hardware useSerial(Baud baud) {
this.rs232ClientBaud = baud;
return this;
}
/**
 * Configure the serial client.
 *
 * @param baud serial baud rate
 * @param device Name of the port. On UNIX systems this will typically
 * be of the form /dev/ttyX, where X is a port number. On
 * Windows systems this will typically be of the form COMX,
 * where X is again a port number.
 */
public Hardware useSerial(Baud baud, String device) {
this.rs232ClientBaud = baud;
this.rs232ClientDevice = device;
return this;
}
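// Illustrative usage sketch (not part of the original source):
//   hardware.useSerial(Baud.B_____9600, "/dev/ttyUSB0");
// "hardware" is a hypothetical HardwareImpl reference and "/dev/ttyUSB0" an example device name;
// the default above targets the Edison's /dev/ttyMFD1.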
public Hardware useI2C() {
this.configI2C = true;
return this;
}
public Hardware useCamera() {
this.configCamera = true;
return this;
}
@Deprecated //would be nice if we did not have to do this.
public Hardware useI2C(int bus) {
this.configI2C = true;
this.i2cBus = bus;
return this;
}
public boolean isUseI2C() {
return this.configI2C;
}
public abstract HardwarePlatformType getPlatformType();
public abstract int read(Port port); //Platform specific
public abstract void write(Port port, int value); //Platform specific
public int maxAnalogMovingAverage() {
return MAX_MOVING_AVERAGE_SUPPORTED;
}
public void coldSetup(){
System.out.println("");
}
protected HardwareConnection[] buildUsedLines() {
HardwareConnection[] result = new HardwareConnection[digitalInputs.length+
digitalOutputs.length+
pwmOutputs.length+
analogInputs.length+
(configI2C?2:0)];
int pos = 0;
System.arraycopy(digitalInputs, 0, result, pos, digitalInputs.length);
pos+=digitalInputs.length;
findDup(result,pos,digitalOutputs, false);
System.arraycopy(digitalOutputs, 0, result, pos, digitalOutputs.length);
pos+=digitalOutputs.length;
findDup(result,pos,pwmOutputs, false);
System.arraycopy(pwmOutputs, 0, result, pos, pwmOutputs.length);
pos+=pwmOutputs.length;
findDup(result,pos,analogInputs, true);
int j = analogInputs.length;
while (--j>=0) {
result[pos++] = new HardwareConnection(analogInputs[j].twig,(int) EdisonConstants.ANALOG_CONNECTOR_TO_PIN[analogInputs[j].register]);
}
if (configI2C) {
findDup(result,pos,EdisonConstants.i2cPins, false);
System.arraycopy(EdisonConstants.i2cPins, 0, result, pos, EdisonConstants.i2cPins.length);
pos+=EdisonConstants.i2cPins.length;
}
return result;
}
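//Throws if any of the new items is already present in the populated part of base
//(indices below baseLimit). When mapAnalogs is true the item's analog connector number
//is translated to its pin and compared against the existing registers; otherwise the
//entries are compared by reference.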
private static void findDup(HardwareConnection[] base, int baseLimit, HardwareConnection[] items, boolean mapAnalogs) {
int i = items.length;
while (--i>=0) {
int j = baseLimit;
while (--j>=0) {
if (mapAnalogs ? base[j].register == EdisonConstants.ANALOG_CONNECTOR_TO_PIN[items[i].register] : base[j]==items[i]) {
throw new UnsupportedOperationException("Connector "+items[i]+" is assigned more than once.");
}
}
}
}
public void shutdown() {
super.shutdown();
//can be overridden by specific hardware impl if shutdown is supported.
}
private void createUARTInputStage(Pipe<SerialInputSchema> masterUARTPipe) {
RS232Clientable client = buildSerialClient();
new SerialDataReaderStage(this.gm, masterUARTPipe, client);
}
protected RS232Clientable buildSerialClient() {
if (null==rs232Client) {
//custom hardware can override this
rs232Client = new RS232Client(rs232ClientDevice, rs232ClientBaud);
}
return rs232Client;
}
protected void createADInputStage(Pipe<GroveResponseSchema> masterResponsePipe) {
//NOTE: rate is NOT set since stage sets and configs its own rate based on polling need.
ReadDeviceInputStage adInputStage = new ReadDeviceInputStage(this.gm, masterResponsePipe, this);
}
protected void createI2COutputInputStage(MsgRuntime<?,?> runtime, Pipe<I2CCommandSchema>[] i2cPipes,
Pipe<TrafficReleaseSchema>[] masterI2CgoOut, Pipe<TrafficAckSchema>[] masterI2CackIn, Pipe<I2CResponseSchema> masterI2CResponsePipe) {
if (hasI2CInputs()) {
I2CJFFIStage i2cJFFIStage = new I2CJFFIStage(gm, runtime, masterI2CgoOut, i2cPipes, masterI2CackIn, masterI2CResponsePipe, this);
} else {
//TODO: build an output only version of this stage because there is nothing to read
I2CJFFIStage i2cJFFIStage = new I2CJFFIStage(gm, runtime, masterI2CgoOut, i2cPipes, masterI2CackIn, masterI2CResponsePipe, this);
}
}
protected void createADOutputStage(MsgRuntime<?,?> runtime, Pipe<GroveRequestSchema>[] requestPipes, Pipe<TrafficReleaseSchema>[] masterPINgoOut, Pipe<TrafficAckSchema>[] masterPINackIn) {
DirectHardwareAnalogDigitalOutputStage adOutputStage = new DirectHardwareAnalogDigitalOutputStage(gm, runtime, requestPipes, masterPINgoOut, masterPINackIn, this);
}
public boolean isListeningToSerial(Object listener) {
return listener instanceof SerialListenerBase
|| !ChildClassScanner.visitUsedByClass(listener, deepListener, SerialListenerTransducer.class);
}
public boolean isListeningToCamera(Object listener) {
return listener instanceof ImageListenerBase
|| !ChildClassScanner.visitUsedByClass(listener, deepListener, ImageListenerTransducer.class);
}
public boolean isListeningToI2C(Object listener) {
return listener instanceof I2CListenerBase
|| !ChildClassScanner.visitUsedByClass(listener, deepListener, I2CListenerTransducer.class);
}
public boolean isListeningToPins(Object listener) {
return listener instanceof DigitalListenerBase ||
listener instanceof AnalogListenerBase ||
listener instanceof RotaryListenerBase
|| !ChildClassScanner.visitUsedByClass(listener, deepListener, DigitalListenerTransducer.class)
|| !ChildClassScanner.visitUsedByClass(listener, deepListener, AnalogListenerTransducer.class)
|| !ChildClassScanner.visitUsedByClass(listener, deepListener, RotaryListenerTransducer.class);
}
private Pipe<MessagePubSub> getTempPipeOfStartupSubscriptions() {
if (null==tempPipeOfStartupSubscriptions) {
final PipeConfig<MessagePubSub> messagePubSubConfig = new PipeConfig<MessagePubSub>(MessagePubSub.instance, maxStartupSubs,maxTopicLengh);
tempPipeOfStartupSubscriptions = new Pipe<MessagePubSub>(messagePubSubConfig);
}
return tempPipeOfStartupSubscriptions;
}
public boolean hasI2CInputs() {
return this.i2cInputs!=null && this.i2cInputs.length>0;
}
public I2CConnection[] getI2CInputs() {
return null==i2cInputs?new I2CConnection[0]:i2cInputs;
}
public HardwareConnection[] getAnalogInputs() {
return analogInputs;
}
public HardwareConnection[] getDigitalInputs() {
return digitalInputs;
}
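//Builds the polling schedule for the I2C inputs: each connection's responseMS is
//converted to nanoseconds and passed to PMath.buildScriptedSchedule.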
public ScriptedSchedule buildI2CPollSchedule() {
I2CConnection[] localInputs = getI2CInputs();
long[] schedulePeriods = new long[localInputs.length];
for (int i = 0; i < localInputs.length; i++) {
schedulePeriods[i] = localInputs[i].responseMS*MS_TO_NS;
}
logger.info("Known I2C rates: {}",Arrays.toString(schedulePeriods));
return PMath.buildScriptedSchedule(schedulePeriods);
}
public boolean hasDigitalOrAnalogInputs() {
return (analogInputs.length+digitalInputs.length)>0;
}
public boolean hasDigitalOrAnalogOutputs() {
return (pwmOutputs.length+digitalOutputs.length)>0;
}
public HardwareConnection[] combinedADConnections() {
HardwareConnection[] localAInputs = getAnalogInputs();
HardwareConnection[] localDInputs = getDigitalInputs();
int totalCount = localAInputs.length + localDInputs.length;
HardwareConnection[] results = new HardwareConnection[totalCount];
System.arraycopy(localAInputs, 0, results, 0, localAInputs.length);
System.arraycopy(localDInputs, 0, results, localAInputs.length, localDInputs.length);
return results;
}
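//Builds the analog/digital polling schedule the same way, analog inputs first and then
//digital inputs; returns null when there is nothing to poll.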
public ScriptedSchedule buildADPollSchedule() {
HardwareConnection[] localAInputs = getAnalogInputs();
HardwareConnection[] localDInputs = getDigitalInputs();
int totalCount = localAInputs.length + localDInputs.length;
if (0==totalCount) {
return null;
}
long[] schedulePeriods = new long[totalCount];
int j = 0;
for (int i = 0; i < localAInputs.length; i++) {
schedulePeriods[j++] = localAInputs[i].responseMS*MS_TO_NS;
}
for (int i = 0; i < localDInputs.length; i++) {
schedulePeriods[j++] = localDInputs[i].responseMS*MS_TO_NS;
}
//analogs then the digitals
return PMath.buildScriptedSchedule(schedulePeriods);
}
public byte convertToPort(byte connection) {
return connection;
}
@Override
public Hardware connect(ADIODevice t, Port port, int customRateMS, int customAvgWindowMS, boolean everyValue) {
int portsLeft = t.pinsUsed();
while (--portsLeft >= 0){
deviceOnPort[port.ordinal()] = t;
if (0 != (port.mask&Port.IS_ANALOG)) {
internalConnectAnalog(t, port.port, customRateMS, customAvgWindowMS, everyValue);
}
else if (0 != (port.mask&Port.IS_DIGITAL)) {
internalConnectDigital(t, port.port, customRateMS, customAvgWindowMS, everyValue);
}
port = Port.nextPort(port);
}
return this;
}
@Override
public Hardware connect(ADIODevice t, Port port, int customRateMS, int customAvgWindowMS) {
return connect(t,port,customRateMS, customAvgWindowMS ,DEFAULT_EVERY_VALUE);
}
@Override
public Hardware connect(ADIODevice t, Port port, int customRateMS) {
return connect(t,port,customRateMS, DEFAULT_AVERAGE_WINDOW_MS ,false);
}
@Override
public Hardware connect(ADIODevice t, Port port, int customRateMS, boolean everyValue) {
return connect(t,port,customRateMS, DEFAULT_AVERAGE_WINDOW_MS ,everyValue);
}
@Override
public Hardware connect(ADIODevice t, Port port) {
return connect (t, port, t.defaultPullRateMS(),DEFAULT_AVERAGE_WINDOW_MS,false);
}
public void releasePinOutTraffic(int count, MsgCommandChannel<?> gcc) {
MsgCommandChannel.publishGo(count, IDX_PIN, gcc);
}
public void releaseI2CTraffic(int count, MsgCommandChannel<?> gcc) {
MsgCommandChannel.publishGo(count, IDX_I2C, gcc);
}
@Override
public void releasePubSubTraffic(int count, MsgCommandChannel<?> gcc) {
MsgCommandChannel.publishGo(count, IDX_MSG, gcc);
}
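//Wires up the runtime graph: discovers all pipes by schema, assigns the traffic indices
//(IDX_PIN, IDX_I2C, IDX_MSG, IDX_NET, IDX_SER), creates a TrafficCopStage per command
//channel, and then builds only the I2C, GPIO, serial, image and pub/sub stages that are
//actually used.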
public void buildStages(MsgRuntime runtime) {
IntHashTable subscriptionPipeLookup2 = MsgRuntime.getSubPipeLookup(runtime);
GraphManager gm2 = MsgRuntime.getGraphManager(runtime);
Pipe<I2CResponseSchema>[] i2cResponsePipes = GraphManager.allPipesOfTypeWithNoProducer(gm2, I2CResponseSchema.instance);
Pipe<GroveResponseSchema>[] responsePipes = GraphManager.allPipesOfTypeWithNoProducer(gm2, GroveResponseSchema.instance);
Pipe<SerialOutputSchema>[] serialOutputPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm2, SerialOutputSchema.instance);
Pipe<I2CCommandSchema>[] i2cPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm2, I2CCommandSchema.instance);
Pipe<GroveRequestSchema>[] pinRequestPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm2, GroveRequestSchema.instance);
Pipe<SerialInputSchema>[] serialInputPipes = GraphManager.allPipesOfTypeWithNoProducer(gm2, SerialInputSchema.instance);
Pipe<ImageSchema>[] imageInputPipes = GraphManager.allPipesOfTypeWithNoProducer(gm2, ImageSchema.instance);
Pipe<NetResponseSchema>[] httpClientResponsePipes = GraphManager.allPipesOfTypeWithNoProducer(gm2, NetResponseSchema.instance);
Pipe<MessageSubscription>[] subscriptionPipes = GraphManager.allPipesOfTypeWithNoProducer(gm2, MessageSubscription.instance);
Pipe<TrafficOrderSchema>[] orderPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm2, TrafficOrderSchema.instance);
Pipe<ClientHTTPRequestSchema>[] httpClientRequestPipes = GraphManager.allPipesOfTypeWithNoConsumer(gm2, ClientHTTPRequestSchema.instance);
Pipe<MessagePubSub>[] messagePubSub = GraphManager.allPipesOfTypeWithNoConsumer(gm2, MessagePubSub.instance);
Pipe<IngressMessages>[] ingressMessagePipes = GraphManager.allPipesOfTypeWithNoConsumer(gm2, IngressMessages.instance);
//TODO: must pull out those pubSub Pipes for direct connections
//TODO: new MessageSchema for direct messages from point to point
// create the pipe instead of pub sub and attach?
//TODO: declare up front once in connections, direct connect topics
// upon seeing these we build a new pipe
int commandChannelCount = orderPipes.length;
int eventSchemas = 0;
IDX_PIN = pinRequestPipes.length>0 ? eventSchemas++ : -1;
IDX_I2C = i2cPipes.length>0 || i2cResponsePipes.length > 0 ? eventSchemas++ : -1; //the 'or' check is to ensure that reading without a cmd channel works
IDX_MSG = (IntHashTable.isEmpty(subscriptionPipeLookup2) && subscriptionPipes.length==0 && messagePubSub.length==0) ? -1 : eventSchemas++;
IDX_NET = useNetClient(httpClientRequestPipes) ? eventSchemas++ : -1;
IDX_SER = serialOutputPipes.length>0 ? eventSchemas++ : -1;
long timeout = 20_000; //20 seconds
//TODO: can we share this while-loop with the parent BuilderImpl? I think so...
int maxGoPipeId = 0;
int t = commandChannelCount;
Pipe<TrafficReleaseSchema>[][] masterGoOut = new Pipe[eventSchemas][0];
Pipe<TrafficAckSchema>[][] masterAckIn = new Pipe[eventSchemas][0];
if (IDX_PIN >= 0) {
masterGoOut[IDX_PIN] = new Pipe[pinRequestPipes.length];
masterAckIn[IDX_PIN] = new Pipe[pinRequestPipes.length];
}
if (IDX_I2C >= 0) {
masterGoOut[IDX_I2C] = new Pipe[i2cPipes.length];
masterAckIn[IDX_I2C] = new Pipe[i2cPipes.length];
}
if (IDX_MSG >= 0) {
masterGoOut[IDX_MSG] = new Pipe[messagePubSub.length];
masterAckIn[IDX_MSG] = new Pipe[messagePubSub.length];
}
if (IDX_NET >= 0) {
masterGoOut[IDX_NET] = new Pipe[httpClientRequestPipes.length];
masterAckIn[IDX_NET] = new Pipe[httpClientRequestPipes.length];
}
if (IDX_SER >=0) {
masterGoOut[IDX_SER] = new Pipe[serialOutputPipes.length];
masterAckIn[IDX_SER] = new Pipe[serialOutputPipes.length];
}
while (--t>=0) {
int features = getFeatures(gm2, orderPipes[t]);
Pipe<TrafficReleaseSchema>[] goOut = new Pipe[eventSchemas];
Pipe<TrafficAckSchema>[] ackIn = new Pipe[eventSchemas];
boolean isDynamicMessaging = (features&Behavior.DYNAMIC_MESSAGING) != 0;
boolean isNetRequester = (features&Behavior.NET_REQUESTER) != 0;
boolean isPinWriter = (features&FogRuntime.PIN_WRITER) != 0;
boolean isI2CWriter = (features&FogRuntime.I2C_WRITER) != 0;
boolean isSerialWriter = (features&FogRuntime.SERIAL_WRITER) != 0;
boolean hasConnections = false;
if (isDynamicMessaging && IDX_MSG>=0) {
hasConnections = true;
maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_MSG);
}
if (isNetRequester && IDX_NET>=0) {
hasConnections = true;
maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_NET);
}
if (isPinWriter && IDX_PIN>=0) {
hasConnections = true;
maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_PIN);
}
if (isI2CWriter && IDX_I2C>=0) {
hasConnections = true;
maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_I2C);
}
if (isSerialWriter && IDX_SER>=0) {
hasConnections = true;
maxGoPipeId = populateGoAckPipes(maxGoPipeId, masterGoOut, masterAckIn, goOut, ackIn, IDX_SER);
}
if (true | hasConnections) { //intentionally always true; the PipeCleanerStage optimization below is disabled (see comment in the else branch)
TrafficCopStage trafficCopStage = new TrafficCopStage(gm, timeout, orderPipes[t], ackIn, goOut, runtime, this);
} else {
//this optimization can no longer be done due to the use of shutdown on command channel.
// revisit this later...
//TODO: we can reintroduce this as long as we have a stage here which does shutdown on -1;
PipeCleanerStage.newInstance(gm, orderPipes[t]);
}
}
initChannelBlocker(maxGoPipeId);
buildHTTPClientGraph(runtime, httpClientResponsePipes, httpClientRequestPipes, masterGoOut, masterAckIn);
if (IDX_MSG <0) {
logger.trace("saved some resources by not starting up the unused pub sub service.");
} else {
createMessagePubSubStage(
runtime,
subscriptionPipeLookup2, ingressMessagePipes,
messagePubSub,
masterGoOut[IDX_MSG], masterAckIn[IDX_MSG], subscriptionPipes);
}
//only build and connect I2C if it is used for either in or out
Pipe<I2CResponseSchema> masterI2CResponsePipe = null;
if (i2cResponsePipes.length>0) {
masterI2CResponsePipe = I2CResponseSchema.instance.newPipe(DEFAULT_LENGTH, DEFAULT_PAYLOAD_SIZE);
ReplicatorStage.newInstance(gm, masterI2CResponsePipe, i2cResponsePipes);
}
if (i2cPipes.length>0 || (null!=masterI2CResponsePipe)) {
createI2COutputInputStage(runtime, i2cPipes, masterGoOut[IDX_I2C], masterAckIn[IDX_I2C], masterI2CResponsePipe);
}
//only build and connect gpio input responses if it is used
if (responsePipes.length>1) {
Pipe<GroveResponseSchema> masterResponsePipe = GroveResponseSchema.instance.newPipe(DEFAULT_LENGTH, DEFAULT_PAYLOAD_SIZE);
ReplicatorStage.newInstance(gm, masterResponsePipe, responsePipes);
createADInputStage(masterResponsePipe);
} else {
if (responsePipes.length==1) {
createADInputStage(responsePipes[0]);
}
}
//only build serial output if data is sent
if (serialOutputPipes.length>0) {
assert(null!=masterGoOut[IDX_SER]);
assert(serialOutputPipes.length == masterGoOut[IDX_SER].length) : serialOutputPipes.length+" == "+masterGoOut[IDX_SER].length;
createSerialOutputStage(runtime, serialOutputPipes, masterGoOut[IDX_SER], masterAckIn[IDX_SER]);
}
//only build serial input if the data is consumed
if (serialInputPipes.length>1) {
Pipe<SerialInputSchema> masterUARTPipe = new Pipe<SerialInputSchema>(pcm.getConfig(SerialInputSchema.class));
new ReplicatorStage<SerialInputSchema>(gm, masterUARTPipe, serialInputPipes);
createUARTInputStage(masterUARTPipe);
} else {
if (serialInputPipes.length==1) {
createUARTInputStage(serialInputPipes[0]);
} else {
}
}
//only build image input if the data is consumed
// TODO: Is this where we determine what kind of platform to listen on (e.g., Edison, Pi)?
if (imageInputPipes.length > 1) {
Pipe<ImageSchema> masterImagePipe = ImageSchema.instance.newPipe(DEFAULT_LENGTH, DEFAULT_PAYLOAD_SIZE);
new ReplicatorStage<ImageSchema>(gm, masterImagePipe, imageInputPipes);
new PiImageListenerStage(gm, masterImagePipe, imageFrameTriggerRateMillis);
} else if (imageInputPipes.length == 1){
new PiImageListenerStage(gm, imageInputPipes[0], imageFrameTriggerRateMillis);
}
//only build direct pin output when we detected its use
if (IDX_PIN>=0) {
createADOutputStage(runtime, pinRequestPipes, masterGoOut[IDX_PIN], masterAckIn[IDX_PIN]);
}
}
private String featureName(final int c) {
if (c == IDX_I2C) {
//FogRuntime.I2C_WRITER;
return "I2C_WRITER";
}
if (c == IDX_MSG) {
//Behavior.DYNAMIC_MESSAGING;
return "DYNAMIC_MESSAGING";
}
if (c == IDX_NET) { //TODO: where is the responder??
//Behavior.NET_REQUESTER;
return "NET_REQUESTER";
}
if (c == IDX_PIN) {
//FogRuntime.PIN_WRITER;
return "PIN_WRITER";
}
if (c == IDX_SER) {
//FogRuntime.SERIAL_WRITER;
return "SERIAL_WRITER";
}
return null;
}
protected void createSerialOutputStage(MsgRuntime<?,?> runtime, Pipe<SerialOutputSchema>[] serialOutputPipes,
Pipe<TrafficReleaseSchema>[] masterGoOut, Pipe<TrafficAckSchema>[] masterAckIn) {
new SerialDataWriterStage(gm, runtime, serialOutputPipes, masterGoOut, masterAckIn,
this, this.buildSerialClient());
}
public static int serialIndex(HardwareImpl hardware) {
return hardware.IDX_SER;
}
public static int i2cIndex(HardwareImpl hardware) {
return hardware.IDX_I2C;
}
@Override
public int pubSubIndex() {
return IDX_MSG;
}
@Override
public int netIndex() {
return IDX_NET;
}
public boolean isTestHardware() {
return false;
}
}
|
// checkstyle: Checks Java source code for adherence to a set of rules.
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.puppycrawl.tools.checkstyle;
import com.puppycrawl.tools.checkstyle.api.Utils;
import com.google.common.collect.Lists;
import com.puppycrawl.tools.checkstyle.api.AuditListener;
import com.puppycrawl.tools.checkstyle.api.CheckstyleException;
import com.puppycrawl.tools.checkstyle.api.Configuration;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
/**
* Wrapper command line program for the Checker.
* @author Oliver Burn
**/
public final class Main
{
/** the options to the command line */
private static final Options OPTS = new Options();
static {
OPTS.addOption("c", true, "The check configuration file to use.");
OPTS.addOption("r", true, "Traverse the directory for source files");
OPTS.addOption("o", true, "Sets the output file. Defaults to stdout");
OPTS.addOption("p", true, "Loads the properties file");
OPTS.addOption(
"f",
true,
"Sets the output format. (plain|xml). Defaults to plain");
OPTS.addOption("v", false, "Print product version and exit");
}
/** Stop instances being created. */
private Main()
{
}
/**
* Loops over the files specified checking them for errors. The exit code
* is the number of errors found in all the files.
* @param aArgs the command line arguments
**/
public static void main(String[] aArgs)
{
// parse the parameters
final CommandLineParser clp = new PosixParser();
CommandLine line = null;
try {
line = clp.parse(OPTS, aArgs);
}
catch (final ParseException e) {
e.printStackTrace();
usage();
}
assert line != null;
// show version and exit
if (line.hasOption("v")) {
System.out.println("Checkstyle version: "
+ Main.class.getPackage().getImplementationVersion());
System.exit(0);
}
// setup the properties
final Properties props =
line.hasOption("p")
? loadProperties(new File(line.getOptionValue("p")))
: System.getProperties();
// ensure a config file is specified
if (!line.hasOption("c")) {
System.out.println("Must specify a config XML file.");
usage();
}
final Configuration config = loadConfig(line, props);
// setup the output stream
OutputStream out = null;
boolean closeOut = false;
if (line.hasOption("o")) {
final String fname = line.getOptionValue("o");
try {
out = new FileOutputStream(fname);
closeOut = true;
}
catch (final FileNotFoundException e) {
System.out.println("Could not find file: '" + fname + "'");
System.exit(1);
}
}
else {
out = System.out;
closeOut = false;
}
final AuditListener listener = createListener(line, out, closeOut);
final List<File> files = getFilesToProcess(line);
final Checker c = createChecker(config, listener);
final int numErrs = c.process(files);
c.destroy();
System.exit(numErrs);
}
/**
* Creates the Checker object.
*
* @param aConfig the configuration to use
* @param aNosy the sticky beak to track what happens
* @return a nice new fresh Checker
*/
private static Checker createChecker(Configuration aConfig,
AuditListener aNosy)
{
Checker c = null;
try {
c = new Checker();
final ClassLoader moduleClassLoader =
Checker.class.getClassLoader();
c.setModuleClassLoader(moduleClassLoader);
c.configure(aConfig);
c.addListener(aNosy);
}
catch (final Exception e) {
System.out.println("Unable to create Checker: "
+ e.getMessage());
e.printStackTrace(System.out);
System.exit(1);
}
return c;
}
/**
* Determines the files to process.
*
* @param aLine the command line options specifying what files to process
* @return list of files to process
*/
private static List<File> getFilesToProcess(CommandLine aLine)
{
final List<File> files = Lists.newLinkedList();
if (aLine.hasOption("r")) {
final String[] values = aLine.getOptionValues("r");
for (String element : values) {
traverse(new File(element), files);
}
}
final String[] remainingArgs = aLine.getArgs();
for (String element : remainingArgs) {
files.add(new File(element));
}
if (files.isEmpty() && !aLine.hasOption("r")) {
System.out.println("Must specify files to process");
usage();
}
return files;
}
/**
* Create the audit listener
*
* @param aLine command line options supplied
* @param aOut the stream to log to
* @param aCloseOut whether the stream should be closed
* @return a fresh new <code>AuditListener</code>
*/
private static AuditListener createListener(CommandLine aLine,
OutputStream aOut,
boolean aCloseOut)
{
final String format =
aLine.hasOption("f") ? aLine.getOptionValue("f") : "plain";
AuditListener listener = null;
if ("xml".equals(format)) {
listener = new XMLLogger(aOut, aCloseOut);
}
else if ("plain".equals(format)) {
listener = new DefaultLogger(aOut, aCloseOut);
}
else {
System.out.println("Invalid format: (" + format
+ "). Must be 'plain' or 'xml'.");
usage();
}
return listener;
}
/**
* Loads the configuration file. Will exit if unable to load.
*
* @param aLine specifies the location of the configuration
* @param aProps the properties to resolve with the configuration
* @return a fresh new configuration
*/
private static Configuration loadConfig(CommandLine aLine,
Properties aProps)
{
try {
return ConfigurationLoader.loadConfiguration(
aLine.getOptionValue("c"), new PropertiesExpander(aProps));
}
catch (final CheckstyleException e) {
System.out.println("Error loading configuration file");
e.printStackTrace(System.out);
System.exit(1);
return null; // can never get here
}
}
/** Prints the usage information. **/
private static void usage()
{
final HelpFormatter hf = new HelpFormatter();
hf.printHelp(
"java "
+ Main.class.getName()
+ " [options] -c <config.xml> file...",
OPTS);
System.exit(1);
}
/**
* Traverses a specified node looking for files to check. Found
* files are added to a specified list. Subdirectories are also
* traversed.
*
* @param aNode the node to process
* @param aFiles list to add found files to
*/
private static void traverse(File aNode, List<File> aFiles)
{
if (aNode.canRead()) {
if (aNode.isDirectory()) {
final File[] nodes = aNode.listFiles();
for (File element : nodes) {
traverse(element, aFiles);
}
}
else if (aNode.isFile()) {
aFiles.add(aNode);
}
}
}
/**
* Loads properties from a File.
* @param aFile the properties file
* @return the properties in aFile
*/
private static Properties loadProperties(File aFile)
{
final Properties properties = new Properties();
FileInputStream fis = null;
try {
fis = new FileInputStream(aFile);
properties.load(fis);
}
catch (final IOException ex) {
System.out.println("Unable to load properties from file: "
+ aFile.getAbsolutePath());
ex.printStackTrace(System.out);
System.exit(1);
}
finally {
Utils.closeQuietly(fis);
}
return properties;
}
}
|
package com.rultor.agents.daemons;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Predicate;
import com.google.common.base.Splitter;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Tv;
import com.jcabi.log.Logger;
import com.jcabi.ssh.Shell;
import com.jcabi.xml.XML;
import com.rultor.Time;
import com.rultor.agents.AbstractAgent;
import com.rultor.agents.shells.TalkShells;
import com.rultor.profiles.ProfileDeprecationAware;
import com.rultor.spi.Profile;
import java.io.IOException;
import java.util.Collection;
import lombok.EqualsAndHashCode;
import lombok.ToString;
import org.apache.commons.lang3.StringUtils;
import org.xembly.Directive;
import org.xembly.Directives;
import org.xembly.Xembler;
/**
* Marks the daemon as done.
*
* @author Yegor Bugayenko (yegor@teamed.io)
* @version $Id$
* @since 1.0
* @checkstyle ClassDataAbstractionCouplingCheck (500 lines)
* @todo #1053:30 min Reduce the data abstraction coupling of EndsDaemon in
* order to get rid of the checkstyle suppression of
* ClassDataAbstractionCouplingCheck
*/
@Immutable
@ToString
@EqualsAndHashCode(callSuper = false)
public final class EndsDaemon extends AbstractAgent {
/**
* Prefix for log highlights.
*/
public static final String HIGHLIGHTS_PREFIX = "RULTOR: ";
/**
* The profile to check.
*/
private final transient ProfileDeprecationAware profile;
/**
* Ctor.
* @param prof Profile
*/
public EndsDaemon(final Profile prof) {
super("/talk/daemon[started and not(code) and not(ended)]");
this.profile = new ProfileDeprecationAware(prof);
}
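// Runs end.sh for this talk; exit code 0 means the daemon is still running, anything
// else makes end() record the exit code, highlights and log tail in the talk XML.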
@Override
public Iterable<Directive> process(final XML xml) throws IOException {
final Shell shell = new TalkShells(xml).get();
this.profile.check(shell);
final String dir = xml.xpath("/talk/daemon/dir/text()").get(0);
final int exit = new Script("end.sh").exec(xml);
final Directives dirs = new Directives();
if (exit == 0) {
Logger.info(
this, "the daemon is still running in %s (%s)",
dir, xml.xpath("/talk/@name").get(0)
);
} else {
dirs.append(this.end(shell, dir));
}
return dirs;
}
/**
* End this daemon.
* @param shell Shell
* @param dir The dir
* @return Directives
* @throws IOException If fails
*/
private Iterable<Directive> end(final Shell shell,
final String dir) throws IOException {
final int exit = EndsDaemon.exit(shell, dir);
final String stdout = new ShellCommand(
shell,
dir,
"cat stdout"
).exec();
final Collection<String> lines = Lists.newArrayList(
Splitter.on(System.lineSeparator()).split(stdout)
);
final String highlights = Joiner.on("\n").join(
Iterables.transform(
Iterables.filter(
lines,
new Predicate<String>() {
@Override
public boolean apply(final String input) {
return input.startsWith(
EndsDaemon.HIGHLIGHTS_PREFIX
);
}
}
),
new Function<String, String>() {
@Override
public String apply(final String str) {
return StringUtils.removeStart(
str, EndsDaemon.HIGHLIGHTS_PREFIX
);
}
}
)
);
Logger.info(this, "daemon finished at %s, exit: %d", dir, exit);
return new Directives()
.xpath("/talk/daemon")
.strict(1)
.add("ended").set(new Time().iso()).up()
.add("code").set(Integer.toString(exit)).up()
.add("highlights").set(Xembler.escape(highlights)).up()
.add("tail")
.set(
Xembler.escape(
StringUtils.substring(
Joiner.on(System.lineSeparator()).join(
Iterables.skip(
lines,
Math.max(lines.size() - Tv.SIXTY, 0)
)
),
-Tv.HUNDRED * Tv.THOUSAND
)
)
);
}
/**
* Get exit code.
* @param shell Shell
* @param dir The dir
* @return Exit code
* @throws IOException If fails
*/
private static int exit(final Shell shell, final String dir)
throws IOException {
final String status = new ShellCommand(
shell,
dir,
"if [ ! -e status ]; then echo 127; exit; fi; cat status"
).exec().trim().replaceAll("[^0-9]", "");
final int exit;
if (status.isEmpty()) {
exit = 1;
} else {
exit = Integer.parseInt(status);
}
return exit;
}
}
|
package com.sdl.selenium.web.table;
import com.sdl.selenium.web.SearchType;
import com.sdl.selenium.web.WebLocator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TableCell extends Cell {
private static final Logger LOGGER = LoggerFactory.getLogger(TableCell.class);
public TableCell() {
setRenderMillis(200);
setClassName("TableCell");
setTag("td");
defaultSearchTextType.add(SearchType.DEEP_CHILD_NODE_OR_SELF);
}
public TableCell(WebLocator container) {
this();
setContainer(container);
}
public TableCell(WebLocator container, int columnIndex) {
this(container);
setPosition(columnIndex);
}
public TableCell(int columnIndex, String columnText, SearchType... searchType) {
this();
setPosition(columnIndex);
setText(columnText);
setSearchTextType(searchType);
}
public TableCell(WebLocator container, int columnIndex, String columnText, SearchType... searchType) {
this(container, columnIndex);
setText(columnText);
setSearchTextType(searchType);
}
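// When a column position is set, injects it into the locator: the leading "//" + tag of
// itemPath is replaced with "//" + tag + "[position]" and the rest of the path is kept.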
@Override
protected String addPositionToPath(String itemPath) {
if (hasPosition()) {
int beginIndex = 2 + getTag().length();
itemPath = "//" + getTag() + "[" + getPosition() + "]" + itemPath.substring(beginIndex);
}
return itemPath;
}
}
|
package com.skcraft.plume.module;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.inject.Inject;
import com.sk89q.intake.Command;
import com.sk89q.intake.Require;
import com.sk89q.intake.parametric.annotation.Text;
import com.skcraft.plume.common.UserId;
import com.skcraft.plume.common.service.auth.Group;
import com.skcraft.plume.common.service.auth.User;
import com.skcraft.plume.common.service.ban.Ban;
import com.skcraft.plume.common.service.ban.BanManager;
import com.skcraft.plume.common.util.Environment;
import com.skcraft.plume.common.util.config.Config;
import com.skcraft.plume.common.util.config.InjectConfig;
import com.skcraft.plume.common.util.module.Module;
import com.skcraft.plume.common.util.service.InjectService;
import com.skcraft.plume.common.util.service.Service;
import com.skcraft.plume.util.Messages;
import com.skcraft.plume.util.ProfileService;
import com.skcraft.plume.util.Profiles;
import com.skcraft.plume.util.concurrent.BackgroundExecutor;
import com.skcraft.plume.util.concurrent.TickExecutorService;
import lombok.extern.java.Log;
import net.minecraft.command.ICommandSender;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.server.MinecraftServer;
import ninja.leaping.configurate.objectmapping.Setting;
import java.io.IOException;
import java.util.Date;
@Module(name = "ban-commands")
@Log
public class BanCommands {
@Inject private BackgroundExecutor executor;
@Inject private ProfileService profileService;
@InjectService private Service<BanManager> banManager;
@Inject private TickExecutorService tickExecutorService;
@Inject private Environment environment;
@InjectConfig("ban_commands") private Config<BansConfig> config;
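// The ban command resolves the player's UUID on a background thread, records the ban,
// then kicks the player and notifies the sender back on the tick thread.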
@Command(aliases = "ban", desc = "Ban a user")
@Require("plume.bans.ban")
public void ban(ICommandSender sender, String name, @Text String reason) {
BanManager banMan = this.banManager.provide();
UserId issuer;
if (sender instanceof EntityPlayer) {
issuer = Profiles.fromPlayer((EntityPlayer) sender);
} else {
issuer = null;
}
ListenableFuture<?> future = executor.getExecutor().submit(() -> {
UserId userId;
try {
userId = profileService.findUserId(name);
} catch (IOException e) {
sender.addChatMessage(Messages.error("Couldn't look up the user information for '" + name + "'."));
return;
}
if (userId != null) {
Ban currentBan = new Ban();
currentBan.setUserId(userId);
currentBan.setExpireTime(null);
currentBan.setHeuristic(false);
currentBan.setIssueBy(issuer);
currentBan.setIssueTime(new Date());
currentBan.setReason(reason);
currentBan.setServer(environment.getServerId());
banMan.addBan(currentBan);
tickExecutorService.execute(() -> {
EntityPlayerMP targetPlayer = MinecraftServer.getServer().getConfigurationManager().func_152612_a(name);
if (targetPlayer != null) targetPlayer.playerNetServerHandler.kickPlayerFromServer(config.get().kickMessage);
sender.addChatMessage(Messages.info(currentBan.getUserId().getName() + " has been banned from the server."));
});
} else {
sender.addChatMessage(Messages.error("Couldn't find a Minecraft account with the name '" + name + "'."));
}
});
executor.addCallbacks(future, sender);
}
@Command(aliases = "pardon", desc = "Pardon a user")
@Require("plume.bans.pardon")
public void pardon(ICommandSender sender, String name, @Text String reason) {
BanManager banMan = this.banManager.provide();
UserId issuer;
if (sender instanceof EntityPlayer) {
issuer = Profiles.fromPlayer((EntityPlayer) sender);
} else {
issuer = null;
}
ListenableFuture<?> future = executor.getExecutor().submit(() -> {
UserId userId;
try {
userId = profileService.findUserId(name);
} catch (IOException e) {
sender.addChatMessage(Messages.error("Couldn't look up the user information for '" + name + "'."));
return;
}
if (userId != null) {
banMan.pardon(userId, issuer, reason);
tickExecutorService.execute(() -> {
sender.addChatMessage(Messages.info(userId.getName() + " has been pardoned from the server."));
});
} else {
sender.addChatMessage(Messages.error("Couldn't find a Minecraft account with the name '" + name + "'."));
}
});
executor.addCallbacks(future, sender);
}
private static class BansConfig {
@Setting(comment = "The message the player sees when s/he is banned")
private String kickMessage = "Your access has been suspended.";
}
}
|
package de.dhbw.humbuch.model.entity;
import java.util.ArrayList;
import java.util.Date;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
@Entity
@Table(name="student")
public class Student implements de.dhbw.humbuch.model.entity.Entity {
@Id
private int id;
@ManyToOne(fetch=FetchType.LAZY, cascade = {CascadeType.PERSIST, CascadeType.MERGE})
@JoinColumn(name="gradeId", referencedColumnName="id")
private Grade grade;
private String lastname;
private String firstname;
private Date birthday;
private String gender;
private boolean leavingSchool;
@OneToMany(mappedBy="student", fetch=FetchType.LAZY)
private List<BorrowedMaterial> borrowedList = new ArrayList<BorrowedMaterial>();
@ElementCollection(targetClass=Subject.class)
@Enumerated(EnumType.STRING)
@CollectionTable(name="studentSubject", joinColumns = @JoinColumn(name="studentId"))
@Column(name="subject")
private Set<Subject> profile = EnumSet.noneOf(Subject.class);
@OneToOne(fetch=FetchType.LAZY, cascade = {CascadeType.PERSIST, CascadeType.MERGE})
@JoinColumn(name="parentId")
private Parent parent;
public Student() {}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public Grade getGrade() {
return grade;
}
public void setGrade(Grade grade) {
this.grade = grade;
}
public String getLastname() {
return lastname;
}
public void setLastname(String lastname) {
this.lastname = lastname;
}
public String getFirstname() {
return firstname;
}
public void setFirstname(String firstname) {
this.firstname = firstname;
}
public Date getBirthday() {
return birthday;
}
public void setBirthday(Date birthday) {
this.birthday = birthday;
}
public String getGender() {
return gender;
}
public void setGender(String gender) {
this.gender = gender;
}
public Parent getParent() {
return parent;
}
public void setParent(Parent parent) {
this.parent = parent;
}
public List<BorrowedMaterial> getBorrowedList() {
return borrowedList;
}
public void setBorrowedList(List<BorrowedMaterial> borrowedList) {
this.borrowedList = borrowedList;
}
public Set<Subject> getProfile() {
return profile;
}
public void setProfile(Set<Subject> profile) {
this.profile = profile;
}
public boolean isLeavingSchool() {
return leavingSchool;
}
public void setLeavingSchool(boolean leavingSchool) {
this.leavingSchool = leavingSchool;
}
public static class Builder {
private final int id;
private final String firstname;
private final String lastname;
private final Date birthday;
private final Grade grade;
private String gender;
private List<BorrowedMaterial> borrowedList = new ArrayList<BorrowedMaterial>();
private Set<Subject> profile = EnumSet.noneOf(Subject.class);
private Parent parent;
private boolean leavingSchool;
public Builder(int id, String firstname, String lastname, Date birthday, Grade grade) {
this.id = id;
this.firstname = firstname;
this.lastname = lastname;
this.birthday = birthday;
this.grade = grade;
}
public Builder gender(String gender) {
this.gender = gender;
return this;
}
public Builder borrowedList(List<BorrowedMaterial> borrowedList) {
this.borrowedList = borrowedList;
return this;
}
public Builder profile(Set<Subject> profile) {
this.profile = profile;
return this;
}
public Builder parent(Parent parent) {
this.parent = parent;
return this;
}
public Builder leavingSchool(boolean leavingSchool){
this.leavingSchool = leavingSchool;
return this;
}
public Student build() {
return new Student(this);
}
}
private Student(Builder builder) {
id = builder.id;
firstname = builder.firstname;
lastname = builder.lastname;
birthday = builder.birthday;
grade = builder.grade;
leavingSchool = builder.leavingSchool;
gender = builder.gender;
borrowedList = builder.borrowedList;
profile = builder.profile;
parent = builder.parent;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + id;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Student other = (Student) obj;
if (id != other.id)
return false;
return true;
}
public boolean hasUnreceivedBorrowedMaterials() {
for(BorrowedMaterial borrowedMaterial : getBorrowedList()) {
if(!borrowedMaterial.isReceived()) {
return true;
}
}
return false;
}
}
|
package edu.chl.proton.model;
public class DocumentFactory {
//use the createDocument method to get an object of type Document
private File file; //fileName would maybe be a clearer name?
private Document document;
public DocumentFactory(){
// check whether the file already exists, otherwise create a new file
}
//TODO: Checker for existing document file, if it exists -> send file to documentClass (unclear whether we really should have this)
private void checkExistingFile(){
}
public Document createDocument(DocumentType documentType){
if(documentType == null){
return null;
}
//TODO: Create classes Plain and so on
if(documentType==DocumentType.PLAIN){
//return new PlainDocument(file); // Start with nothing
} else if(documentType==DocumentType.MARKDOWN){
//return new Markdown(file); // Cursor cursor, File file, List<Parts> parts, List<String> lines
return new Markdown();
} else if(documentType==DocumentType.SLIDE){
//return new SlideDocument(file); // Mode where you can make notes on each slide (MAIN POINT, DETAILS, PICTURE)
} else if(documentType==DocumentType.ASSIGNMENT){
//return new AssignmentDocument(file); // Template for assignments, i.e. front page and subsections for each assignment.
}
return null;
}
// if no document exists, create one, then return it
public Document getDocument(String filePath){
return createDocument(DocumentType.MARKDOWN);
}
}
|
package edu.neu.ccs.pyramid.application;
import com.fasterxml.jackson.databind.ObjectMapper;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.*;
import edu.neu.ccs.pyramid.eval.Accuracy;
import edu.neu.ccs.pyramid.eval.Overlap;
import edu.neu.ccs.pyramid.feature.TopFeatures;
import edu.neu.ccs.pyramid.feature_selection.FeatureDistribution;
import edu.neu.ccs.pyramid.multilabel_classification.MultiLabelPredictionAnalysis;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBConfig;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBInspector;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBTrainer;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGradientBoosting;
import edu.neu.ccs.pyramid.util.Serialization;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.time.StopWatch;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class App2 {
public static void main(String[] args) throws Exception{
if (args.length !=1){
throw new IllegalArgumentException("please specify the config file");
}
Config config = new Config(args[0]);
System.out.println(config);
new File(config.getString("output.folder")).mkdirs();
if (config.getBoolean("train")){
train(config);
report(config,config.getString("input.trainData"));
}
if (config.getBoolean("test")){
report(config,config.getString("input.testData"));
}
}
public static void main(Config config) throws Exception{
new File(config.getString("output.folder")).mkdirs();
if (config.getBoolean("train")){
train(config);
report(config,config.getString("input.trainData"));
}
if (config.getBoolean("test")){
report(config,config.getString("input.testData"));
}
}
static MultiLabelClfDataSet loadData(Config config, String dataName) throws Exception{
File dataFile = new File(new File(config.getString("input.folder"),
"data_sets"),dataName);
MultiLabelClfDataSet dataSet = TRECFormat.loadMultiLabelClfDataSet(dataFile, DataSetType.ML_CLF_SPARSE,
true);
return dataSet;
}
// static MultiLabelClfDataSet loadTrainData(Config config) throws Exception{
// String trainFile = new File(config.getString("input.folder"),
// config.getString("input.trainData")).getAbsolutePath();
// MultiLabelClfDataSet dataSet;
// if (config.getBoolean("input.featureMatrix.sparse")){
// dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(trainFile), DataSetType.ML_CLF_SPARSE,
// true);
// } else {
// dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(trainFile), DataSetType.ML_CLF_DENSE,
// true);
// return dataSet;
// static MultiLabelClfDataSet loadTestData(Config config) throws Exception{
// String trainFile = new File(config.getString("input.folder"),
// config.getString("input.testData")).getAbsolutePath();
// MultiLabelClfDataSet dataSet;
// if (config.getBoolean("input.featureMatrix.sparse")){
// dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(trainFile), DataSetType.ML_CLF_SPARSE,
// true);
// } else {
// dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(trainFile), DataSetType.ML_CLF_DENSE,
// true);
// return dataSet;
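// Trains an IMLGradientBoosting model: loads the training (and, when test progress is
// shown, the test) set, configures the booster from the train.* properties, iterates
// train.numIterations times while optionally printing accuracy and overlap, and finally
// serializes the model to output.folder/model.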
static void train(Config config) throws Exception{
String output = config.getString("output.folder");
int numIterations = config.getInt("train.numIterations");
int numLeaves = config.getInt("train.numLeaves");
double learningRate = config.getDouble("train.learningRate");
int minDataPerLeaf = config.getInt("train.minDataPerLeaf");
String modelName = "model";
double featureSamplingRate = config.getDouble("train.featureSamplingRate");
double dataSamplingRate = config.getDouble("train.dataSamplingRate");
StopWatch stopWatch = new StopWatch();
stopWatch.start();
MultiLabelClfDataSet dataSet = loadData(config,config.getString("input.trainData"));
MultiLabelClfDataSet testSet = null;
if (config.getBoolean("train.showTestProgress")){ //the test set is only needed when test-set progress is reported
testSet = loadData(config,config.getString("input.testData"));
}
int[] activeFeatures = IntStream.range(0, dataSet.getNumFeatures()).toArray();
int numClasses = dataSet.getNumClasses();
System.out.println("number of class = "+numClasses);
IMLGBConfig imlgbConfig = new IMLGBConfig.Builder(dataSet)
.dataSamplingRate(dataSamplingRate)
.featureSamplingRate(featureSamplingRate)
.learningRate(learningRate)
.minDataPerLeaf(minDataPerLeaf)
.numLeaves(numLeaves)
.numSplitIntervals(config.getInt("train.numSplitIntervals"))
.usePrior(config.getBoolean("train.usePrior"))
.build();
IMLGradientBoosting boosting;
if (config.getBoolean("train.warmStart")){
boosting = IMLGradientBoosting.deserialize(new File(output,modelName));
} else {
boosting = new IMLGradientBoosting(numClasses);
}
String predictFashion = config.getString("predict.fashion").toLowerCase();
switch (predictFashion){
case "crf":
boosting.setPredictFashion(IMLGradientBoosting.PredictFashion.CRF);
break;
case "independent":
boosting.setPredictFashion(IMLGradientBoosting.PredictFashion.INDEPENDENT);
break;
default:
throw new IllegalArgumentException("predict.fashion should be independent or crf");
}
IMLGBTrainer trainer = new IMLGBTrainer(imlgbConfig,boosting);
//todo make it better
trainer.setActiveFeatures(activeFeatures);
int progressInterval = config.getInt("train.showProgress.interval");
for (int i=0;i<numIterations;i++){
System.out.println("iteration "+i);
trainer.iterate();
// System.out.println("model size = "+boosting.getRegressors(0).size());
if (config.getBoolean("train.showTrainProgress") && (i%progressInterval==0)){
System.out.println("accuracy on training set = "+ Accuracy.accuracy(boosting,
dataSet));
System.out.println("overlap on training set = "+ Overlap.overlap(boosting, dataSet));
}
if (config.getBoolean("train.showTestProgress") && (i%progressInterval==0)){
System.out.println("accuracy on test set = "+ Accuracy.accuracy(boosting,
testSet));
System.out.println("overlap on test set = "+ Overlap.overlap(boosting, testSet));
}
}
File serializedModel = new File(output,modelName);
boosting.serialize(serializedModel);
System.out.println(stopWatch);
}
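// Generates reports for one data set: prints accuracy and overlap, writes the top
// features per class to top_features.json, and writes per-document prediction analyses
// to report_<n>.json files under output.folder/reports.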
static void report(Config config, String dataName) throws Exception{
System.out.println("generating reports for data set "+dataName);
String output = config.getString("output.folder");
String modelName = "model";
File analysisFolder = new File(new File(output,"reports"),dataName+"_reports");
analysisFolder.mkdirs();
FileUtils.cleanDirectory(analysisFolder);
IMLGradientBoosting boosting = IMLGradientBoosting.deserialize(new File(output,modelName));
String predictFashion = config.getString("predict.fashion").toLowerCase();
switch (predictFashion){
case "crf":
boosting.setPredictFashion(IMLGradientBoosting.PredictFashion.CRF);
break;
case "independent":
boosting.setPredictFashion(IMLGradientBoosting.PredictFashion.INDEPENDENT);
break;
}
MultiLabelClfDataSet dataSet = loadData(config,dataName);
System.out.println("accuracy on data set = "+Accuracy.accuracy(boosting,dataSet));
System.out.println("overlap on data set = "+ Overlap.overlap(boosting,dataSet));
// System.out.println("macro-averaged measure on training set:");
// System.out.println(new MacroAveragedMeasures(boosting,dataSet));
if (true){
File distributionFile = new File(new File(config.getString("input.folder"), "meta_data"),"distributions.ser");
Collection<FeatureDistribution> distributions = (Collection) Serialization.deserialize(distributionFile);
int limit = config.getInt("report.topFeatures.limit");
List<TopFeatures> topFeaturesList = IntStream.range(0,boosting.getNumClasses())
.mapToObj(k -> IMLGBInspector.topFeatures(boosting, k, limit, distributions))
.collect(Collectors.toList());
ObjectMapper mapper = new ObjectMapper();
String file = "top_features.json";
mapper.writeValue(new File(analysisFolder,file), topFeaturesList);
}
// if (config.getBoolean("verify.topNgramsFeatures")){
// for (int k=0;k<dataSet.getNumClasses();k++) {
// List<Feature> featureNames = IMLGBInspector.topFeatures(boosting, k)
// .stream().filter(feature -> feature instanceof Ngram)
// .map(feature -> (Ngram)feature)
// .filter(ngram -> ngram.getN()>1)
// .collect(Collectors.toList());
// System.out.println("top ngram features for class " + k + "(" + labelTranslator.toExtLabel(k) + "):");
// System.out.println(featureNames);
if (true){
int limit = config.getInt("report.rule.limit");
int numDocsPerFile = config.getInt("report.numDocsPerFile");
int numFiles = (int)Math.ceil((double)dataSet.getNumDataPoints()/numDocsPerFile);
double probThreshold=config.getDouble("report.classProbThreshold");
for (int i=0;i<numFiles;i++){
int start = i*numDocsPerFile;
int end = start+numDocsPerFile;
List<MultiLabelPredictionAnalysis> partition = new ArrayList<>();
for (int a=start;a<end && a<dataSet.getNumDataPoints();a++){
List<Integer> classes = new ArrayList<Integer>();
for (int k = 0; k < boosting.getNumClasses(); k++){
if (boosting.predictClassProb(dataSet.getRow(a),k)>=probThreshold){
classes.add(k);
}
}
partition.add(IMLGBInspector.analyzePrediction(boosting, dataSet, a, classes, limit));
}
ObjectMapper mapper = new ObjectMapper();
String file = "report_"+(i+1)+".json";
mapper.writeValue(new File(analysisFolder,file), partition);
}
}
System.out.println("reports generated");
}
}
|
package edu.neu.ccs.pyramid.application;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.*;
import edu.neu.ccs.pyramid.eval.*;
import edu.neu.ccs.pyramid.feature.TopFeatures;
import edu.neu.ccs.pyramid.feature_selection.FeatureDistribution;
import edu.neu.ccs.pyramid.multilabel_classification.MultiLabelPredictionAnalysis;
import edu.neu.ccs.pyramid.multilabel_classification.PluginPredictor;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.*;
import edu.neu.ccs.pyramid.multilabel_classification.thresholding.MacroFMeasureTuner;
import edu.neu.ccs.pyramid.multilabel_classification.thresholding.TunedMarginalClassifier;
import edu.neu.ccs.pyramid.util.Progress;
import edu.neu.ccs.pyramid.util.Serialization;
import edu.neu.ccs.pyramid.util.SetUtil;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.time.StopWatch;
import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
public class App2 {
public static void main(String[] args) throws Exception{
if (args.length !=1){
throw new IllegalArgumentException("Please specify a properties file.");
}
Config config = new Config(args[0]);
main(config);
}
public static void main(Config config) throws Exception{
System.out.println(config);
new File(config.getString("output.folder")).mkdirs();
if (config.getBoolean("train")){
train(config);
if (config.getString("predict.target").equals("macroFMeasure")){
System.out.println("predict.target=macroFMeasure, user needs to run 'tune' before predictions can be made. " +
"Reports will be generated after tuning.");
} else {
if (config.getBoolean("train.generateReports")){
report(config,config.getString("input.trainData"));
}
}
}
if (config.getBoolean("tune")){
tuneForMacroF(config);
if (config.getBoolean("train.generateReports")){
report(config,config.getString("input.trainData"));
}
}
if (config.getBoolean("test")){
report(config,config.getString("input.testData"));
}
}
static MultiLabelClfDataSet loadData(Config config, String dataName) throws Exception{
File dataFile = new File(new File(config.getString("input.folder"),
"data_sets"),dataName);
MultiLabelClfDataSet dataSet = TRECFormat.loadMultiLabelClfDataSet(dataFile, DataSetType.ML_CLF_SPARSE,
true);
return dataSet;
}
static void train(Config config) throws Exception{
String output = config.getString("output.folder");
int numIterations = config.getInt("train.numIterations");
int numLeaves = config.getInt("train.numLeaves");
double learningRate = config.getDouble("train.learningRate");
int minDataPerLeaf = config.getInt("train.minDataPerLeaf");
String modelName = "model_app3";
// double featureSamplingRate = config.getDouble("train.featureSamplingRate");
// double dataSamplingRate = config.getDouble("train.dataSamplingRate");
StopWatch stopWatch = new StopWatch();
stopWatch.start();
MultiLabelClfDataSet dataSet = loadData(config,config.getString("input.trainData"));
MultiLabelClfDataSet testSet = null;
if (config.getBoolean("train.showTestProgress")){
testSet = loadData(config,config.getString("input.testData"));
}
int[] activeFeatures = IntStream.range(0, dataSet.getNumFeatures()).toArray();
int numClasses = dataSet.getNumClasses();
System.out.println("number of class = "+numClasses);
IMLGBConfig imlgbConfig = new IMLGBConfig.Builder(dataSet)
// .dataSamplingRate(dataSamplingRate)
// .featureSamplingRate(featureSamplingRate)
.learningRate(learningRate)
.minDataPerLeaf(minDataPerLeaf)
.numLeaves(numLeaves)
.numSplitIntervals(config.getInt("train.numSplitIntervals"))
.usePrior(config.getBoolean("train.usePrior"))
.build();
IMLGradientBoosting boosting;
if (config.getBoolean("train.warmStart")){
boosting = IMLGradientBoosting.deserialize(new File(output,modelName));
} else {
boosting = new IMLGradientBoosting(numClasses);
}
System.out.println("During training, the performance is reported using Hamming loss optimal predictor");
System.out.println("initializing trainer");
IMLGBTrainer trainer = new IMLGBTrainer(imlgbConfig,boosting);
System.out.println("trainer initialized");
//todo make it better
// trainer.setActiveFeatures(activeFeatures);
int progressInterval = config.getInt("train.showProgress.interval");
for (int i=0;i<numIterations;i++){
System.out.println("iteration "+i);
trainer.iterate();
if (config.getBoolean("train.showTrainProgress") && (i%progressInterval==0 || i==numIterations-1)){
// todo
System.out.println("predicting");
MultiLabel[] a = boosting.predict(dataSet);
System.out.println("prediction done");
System.out.println("training set performance");
System.out.println(new MLMeasures(boosting,dataSet));
}
if (config.getBoolean("train.showTestProgress") && (i%progressInterval==0 || i==numIterations-1)){
System.out.println("test set performance");
System.out.println(new MLMeasures(boosting,testSet));
}
}
File serializedModel = new File(output,modelName);
boosting.serialize(serializedModel);
System.out.println(stopWatch);
}
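// Tunes per-label probability thresholds for macro F (beta taken from tune.FMeasure.beta)
// on the data set selected by tune.data and serializes the resulting
// TunedMarginalClassifier to output.folder/predictor_macro_f.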
static void tuneForMacroF(Config config) throws Exception{
System.out.println("start tuning for macro F measure");
String output = config.getString("output.folder");
String modelName = "model_app3";
double beta = config.getDouble("tune.FMeasure.beta");
IMLGradientBoosting boosting = IMLGradientBoosting.deserialize(new File(output,modelName));
String tuneBy = config.getString("tune.data");
String dataName;
switch (tuneBy){
case "train":
dataName = config.getString("input.trainData");
break;
case "test":
dataName = config.getString("input.testData");
break;
default:
throw new IllegalArgumentException("tune.data should be train or test");
}
MultiLabelClfDataSet dataSet = loadData(config,dataName);
double[] thresholds = MacroFMeasureTuner.tuneThresholds(boosting,dataSet,beta);
TunedMarginalClassifier tunedMarginalClassifier = new TunedMarginalClassifier(boosting,thresholds);
Serialization.serialize(tunedMarginalClassifier, new File(output,"predictor_macro_f"));
System.out.println("finish tuning for macro F measure");
}
static void report(Config config, String dataName) throws Exception{
System.out.println("generating reports for data set "+dataName);
String output = config.getString("output.folder");
String modelName = "model_app3";
File analysisFolder = new File(new File(output,"reports_app3"),dataName+"_reports");
analysisFolder.mkdirs();
FileUtils.cleanDirectory(analysisFolder);
IMLGradientBoosting boosting = IMLGradientBoosting.deserialize(new File(output,modelName));
String predictTarget = config.getString("predict.target");
PluginPredictor<IMLGradientBoosting> pluginPredictorTmp = null;
switch (predictTarget){
case "subsetAccuracy":
pluginPredictorTmp = new SubsetAccPredictor(boosting);
break;
case "hammingLoss":
pluginPredictorTmp = new HammingPredictor(boosting);
break;
case "instanceFMeasure":
pluginPredictorTmp = new InstanceF1Predictor(boosting);
break;
case "macroFMeasure":
TunedMarginalClassifier tunedMarginalClassifier = (TunedMarginalClassifier)Serialization.deserialize(new File(output, "predictor_macro_f"));
pluginPredictorTmp = new MacroF1Predictor(boosting,tunedMarginalClassifier);
break;
default:
throw new IllegalArgumentException("unknown prediction target measure "+predictTarget);
}
// just to make Lambda expressions happy
final PluginPredictor<IMLGradientBoosting> pluginPredictor = pluginPredictorTmp;
MultiLabelClfDataSet dataSet = loadData(config,dataName);
MLMeasures mlMeasures = new MLMeasures(pluginPredictor,dataSet);
mlMeasures.getMacroAverage().setLabelTranslator(boosting.getLabelTranslator());
System.out.println("performance on dataset "+dataName);
System.out.println(mlMeasures);
boolean simpleCSV = true;
if (simpleCSV){
System.out.println("start generating simple CSV report");
double probThreshold=config.getDouble("report.classProbThreshold");
File csv = new File(analysisFolder,"report.csv");
List<String> strs = IntStream.range(0,dataSet.getNumDataPoints()).parallel()
.mapToObj(i->IMLGBInspector.simplePredictionAnalysis(boosting,pluginPredictor,dataSet,i,probThreshold))
.collect(Collectors.toList());
StringBuilder sb = new StringBuilder();
for (int i=0;i<dataSet.getNumDataPoints();i++){
String str = strs.get(i);
sb.append(str);
}
FileUtils.writeStringToFile(csv,sb.toString(),false);
System.out.println("finish generating simple CSV report");
}
boolean topFeaturesToJson = false;
File distributionFile = new File(new File(config.getString("input.folder"), "meta_data"),"distributions.ser");
if (distributionFile.exists()){
topFeaturesToJson = true;
}
if (topFeaturesToJson){
System.out.println("start writing top features");
Collection<FeatureDistribution> distributions = (Collection) Serialization.deserialize(distributionFile);
int limit = config.getInt("report.topFeatures.limit");
List<TopFeatures> topFeaturesList = IntStream.range(0,boosting.getNumClasses())
.mapToObj(k -> IMLGBInspector.topFeatures(boosting, k, limit, distributions))
.collect(Collectors.toList());
ObjectMapper mapper = new ObjectMapper();
String file = "top_features.json";
mapper.writeValue(new File(analysisFolder,file), topFeaturesList);
System.out.println("finish writing top features");
}
boolean rulesToJson = true;
if (rulesToJson){
System.out.println("start writing rules to json");
int ruleLimit = config.getInt("report.rule.limit");
int numDocsPerFile = config.getInt("report.numDocsPerFile");
int numFiles = (int)Math.ceil((double)dataSet.getNumDataPoints()/numDocsPerFile);
double probThreshold=config.getDouble("report.classProbThreshold");
int labelSetLimit = config.getInt("report.labelSetLimit");
IntStream.range(0,numFiles).forEach(i->{
int start = i*numDocsPerFile;
int end = start+numDocsPerFile;
List<MultiLabelPredictionAnalysis> partition = IntStream.range(start,Math.min(end,dataSet.getNumDataPoints())).parallel().mapToObj(a->
IMLGBInspector.analyzePrediction(boosting, pluginPredictor, dataSet, a, ruleLimit,labelSetLimit, probThreshold)).collect(Collectors.toList());
ObjectMapper mapper = new ObjectMapper();
String file = "report_"+(i+1)+".json";
try {
mapper.writeValue(new File(analysisFolder,file), partition);
} catch (IOException e) {
e.printStackTrace();
}
System.out.println("progress = "+ Progress.percentage(i+1,numFiles));
});
System.out.println("finish writing rules to json");
}
boolean dataInfoToJson = true;
if (dataInfoToJson){
System.out.println("start writing data info to json");
Set<String> modelLabels = IntStream.range(0,boosting.getNumClasses()).mapToObj(i->boosting.getLabelTranslator().toExtLabel(i))
.collect(Collectors.toSet());
Set<String> dataSetLabels = DataSetUtil.gatherLabels(dataSet).stream().map(i -> dataSet.getLabelTranslator().toExtLabel(i))
.collect(Collectors.toSet());
JsonGenerator jsonGenerator = new JsonFactory().createGenerator(new File(analysisFolder,"data_info.json"), JsonEncoding.UTF8);
jsonGenerator.writeStartObject();
jsonGenerator.writeStringField("dataSet",dataName);
jsonGenerator.writeNumberField("numClassesInModel",boosting.getNumClasses());
jsonGenerator.writeNumberField("numClassesInDataSet",dataSetLabels.size());
jsonGenerator.writeNumberField("numClassesInModelDataSetCombined",dataSet.getNumClasses());
Set<String> modelNotDataLabels = SetUtil.complement(modelLabels, dataSetLabels);
Set<String> dataNotModelLabels = SetUtil.complement(dataSetLabels,modelLabels);
jsonGenerator.writeNumberField("numClassesInDataSetButNotModel",dataNotModelLabels.size());
jsonGenerator.writeNumberField("numClassesInModelButNotDataSet",modelNotDataLabels.size());
jsonGenerator.writeArrayFieldStart("classesInDataSetButNotModel");
for (String label: dataNotModelLabels){
jsonGenerator.writeObject(label);
}
jsonGenerator.writeEndArray();
jsonGenerator.writeArrayFieldStart("classesInModelButNotDataSet");
for (String label: modelNotDataLabels){
jsonGenerator.writeObject(label);
}
jsonGenerator.writeEndArray();
jsonGenerator.writeNumberField("labelCardinality",dataSet.labelCardinality());
jsonGenerator.writeEndObject();
jsonGenerator.close();
System.out.println("finish writing data info to json");
}
boolean modelConfigToJson = true;
if (modelConfigToJson){
System.out.println("start writing model config to json");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.writeValue(new File(analysisFolder,"model_config.json"),config);
System.out.println("finish writing model config to json");
}
boolean dataConfigToJson = true;
if (dataConfigToJson){
System.out.println("start writing data config to json");
File dataConfigFile = Paths.get(config.getString("input.folder"),
"data_sets",dataName,"data_config.json").toFile();
if (dataConfigFile.exists()){
FileUtils.copyFileToDirectory(dataConfigFile,analysisFolder);
}
System.out.println("finish writing data config to json");
}
boolean performanceToJson = true;
if (performanceToJson){
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.writeValue(new File(analysisFolder,"performance.json"),mlMeasures);
}
boolean individualPerformance = true;
if (individualPerformance){
System.out.println("start writing individual label performance to json");
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.writeValue(new File(analysisFolder,"individual_performance.json"),mlMeasures.getMacroAverage());
System.out.println("finish writing individual label performance to json");
}
System.out.println("reports generated");
}
}
|
package edu.neu.ccs.pyramid.experiment;
import edu.neu.ccs.pyramid.classification.boosting.lktb.LKTBInspector;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.*;
import edu.neu.ccs.pyramid.eval.Accuracy;
import edu.neu.ccs.pyramid.eval.MacroAveragedMeasures;
import edu.neu.ccs.pyramid.eval.Overlap;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBConfig;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGBInspector;
import edu.neu.ccs.pyramid.multilabel_classification.imlgb.IMLGradientBoosting;
import org.apache.commons.lang3.time.StopWatch;
import java.io.File;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class Exp14 {
public static void main(String[] args) throws Exception{
if (args.length !=1){
throw new IllegalArgumentException("please specify the config file");
}
Config config = new Config(args[0]);
System.out.println(config);
if (config.getBoolean("train")){
train(config);
}
if (config.getBoolean("verify")){
verify(config);
}
if (config.getBoolean("test")){
test(config);
}
}
static MultiLabelClfDataSet loadTrainData(Config config) throws Exception{
String trainFile = new File(config.getString("input.folder"),
config.getString("input.trainData")).getAbsolutePath();
MultiLabelClfDataSet dataSet;
if (config.getBoolean("featureMatrix.sparse")){
dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(trainFile), DataSetType.ML_CLF_SPARSE,
true);
} else {
dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(trainFile), DataSetType.ML_CLF_DENSE,
true);
}
return dataSet;
}
static MultiLabelClfDataSet loadTestData(Config config) throws Exception{
        String testFile = new File(config.getString("input.folder"),
                config.getString("input.testData")).getAbsolutePath();
        MultiLabelClfDataSet dataSet;
        if (config.getBoolean("featureMatrix.sparse")){
            dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(testFile), DataSetType.ML_CLF_SPARSE,
                true);
        } else {
            dataSet= TRECFormat.loadMultiLabelClfDataSet(new File(testFile), DataSetType.ML_CLF_DENSE,
                true);
}
return dataSet;
}
static void train(Config config) throws Exception{
String archive = config.getString("archive.folder");
int numIterations = config.getInt("train.numIterations");
int numLeaves = config.getInt("train.numLeaves");
double learningRate = config.getDouble("train.learningRate");
int minDataPerLeaf = config.getInt("train.minDataPerLeaf");
String modelName = config.getString("archive.model");
double featureSamplingRate = config.getDouble("train.featureSamplingRate");
double dataSamplingRate = config.getDouble("train.dataSamplingRate");
StopWatch stopWatch = new StopWatch();
stopWatch.start();
MultiLabelClfDataSet dataSet = loadTrainData(config);
MultiLabelClfDataSet testDataSet = loadTestData(config);
int numClasses = dataSet.getNumClasses();
System.out.println("number of class = "+numClasses);
IMLGBConfig imlgbConfig = new IMLGBConfig.Builder(dataSet)
.dataSamplingRate(dataSamplingRate)
.featureSamplingRate(featureSamplingRate)
.learningRate(learningRate)
.minDataPerLeaf(minDataPerLeaf)
.numLeaves(numLeaves)
.numSplitIntervals(config.getInt("train.numSplitIntervals"))
.build();
IMLGradientBoosting boosting = new IMLGradientBoosting(numClasses);
List<MultiLabel> assignments = DataSetUtil.gatherLabels(dataSet);
boosting.setAssignments(assignments);
boosting.setPriorProbs(dataSet);
boosting.setTrainConfig(imlgbConfig);
for (int i=0;i<numIterations;i++){
System.out.println("iteration "+i);
boosting.boostOneRound();
if (config.getBoolean("train.showPerformanceEachRound")){
System.out.println("accuracy on training set = "+ Accuracy.accuracy(boosting,
dataSet));
System.out.println("overlap on training set = "+ Overlap.overlap(boosting,dataSet));
System.out.println("accuracy on test set = "+ Accuracy.accuracy(boosting,
testDataSet));
System.out.println("overlap on test set = "+ Overlap.overlap(boosting,testDataSet));
}
}
File serializedModel = new File(archive,modelName);
boosting.serialize(serializedModel);
System.out.println(stopWatch);
}
static void verify(Config config) throws Exception{
String archive = config.getString("archive.folder");
String modelName = config.getString("archive.model");
IMLGradientBoosting boosting = IMLGradientBoosting.deserialize(new File(archive,modelName));
MultiLabelClfDataSet dataSet = loadTrainData(config);
LabelTranslator labelTranslator = dataSet.getSetting().getLabelTranslator();
System.out.println("accuracy on training set = "+Accuracy.accuracy(boosting,dataSet));
// System.out.println("overlap on training set = "+ Overlap.overlap(boosting,dataSet));
// System.out.println("macro-averaged measure on training set:");
// System.out.println(new MacroAveragedMeasures(boosting,dataSet));
if (config.getBoolean("verify.showPredictions")){
List<MultiLabel> prediction = boosting.predict(dataSet);
for (int i=0;i<dataSet.getNumDataPoints();i++){
System.out.println(""+i);
System.out.println("true labels:");
System.out.println(dataSet.getMultiLabels()[i]);
StringBuilder trueExtLabels = new StringBuilder();
for (int matched: dataSet.getMultiLabels()[i].getMatchedLabels()){
trueExtLabels.append(labelTranslator.toExtLabel(matched));
trueExtLabels.append(", ");
}
System.out.println(trueExtLabels);
System.out.println("predictions:");
System.out.println(prediction.get(i));
StringBuilder predictedExtLabels = new StringBuilder();
for (int matched: prediction.get(i).getMatchedLabels()){
predictedExtLabels.append(labelTranslator.toExtLabel(matched));
predictedExtLabels.append(", ");
}
System.out.println(predictedExtLabels);
}
}
if (config.getBoolean("verify.topFeatures")){
for (int k=0;k<dataSet.getNumClasses();k++) {
List<String> featureNames = IMLGBInspector.topFeatureNames(boosting, k);
System.out.println("top features for class " + k + "(" + labelTranslator.toExtLabel(k) + "):");
System.out.println(featureNames);
}
}
if (config.getBoolean("verify.topNgramsFeatures")){
for (int k=0;k<dataSet.getNumClasses();k++) {
List<String> featureNames = IMLGBInspector.topFeatureNames(boosting, k)
.stream().filter(name -> name.split(" ").length>1)
.collect(Collectors.toList());
System.out.println("top ngram features for class " + k + "(" + labelTranslator.toExtLabel(k) + "):");
System.out.println(featureNames);
}
}
}
static void test(Config config) throws Exception{
String archive = config.getString("archive.folder");
String modelName = config.getString("archive.model");
IMLGradientBoosting boosting = IMLGradientBoosting.deserialize(new File(archive,modelName));
MultiLabelClfDataSet dataSet = loadTestData(config);
System.out.println("accuracy on test set = "+Accuracy.accuracy(boosting,dataSet));
System.out.println("overlap on test set = "+ Overlap.overlap(boosting,dataSet));
// System.out.println("macro-averaged measure on test set:");
// System.out.println(new MacroAveragedMeasures(boosting,dataSet));
if (config.getBoolean("test.showPredictions")){
List<MultiLabel> prediction = boosting.predict(dataSet);
for (int i=0;i<dataSet.getNumDataPoints();i++){
System.out.println(""+i);
System.out.println("true labels:");
System.out.println(dataSet.getMultiLabels()[i]);
System.out.println("predictions:");
System.out.println(prediction.get(i));
}
}
if (config.getBoolean("test.analyzeMistakes")){
System.out.println("analyzing mistakes");
analyzeMistakes(config,boosting,dataSet);
}
}
static void analyzeMistakes(Config config, IMLGradientBoosting boosting, MultiLabelClfDataSet dataSet) throws Exception{
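        // for every misclassified test document, either flag predictions that involve labels unseen
        // during training or print a detailed per-label mistake analysis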
int numClassesInTrain = getNumClassesInTrain(config);
int numClassesInTest = dataSet.getNumClasses();
LabelTranslator labelTranslator = dataSet.getSetting().getLabelTranslator();
if (numClassesInTest>numClassesInTrain){
System.out.println("new labels not seen in the training set:");
for (int k=numClassesInTrain;k<numClassesInTest;k++){
System.out.println(""+k+"("+labelTranslator.toExtLabel(k)+")");
}
}
List<MultiLabel> predictions = boosting.predict(dataSet);
MultiLabel[] trueLabels = dataSet.getMultiLabels();
int limit = config.getInt("test.analyzeMistakes.limit");
for (int i=0;i<dataSet.getNumDataPoints();i++){
System.out.println("=======================================");
MultiLabel prediction = predictions.get(i);
MultiLabel trueLabel = trueLabels[i];
if (!prediction.equals(trueLabel)){
FeatureRow featureRow = dataSet.getFeatureRow(i);
System.out.println("data point "+i+" index id = "+featureRow.getSetting().getExtId());
if (prediction.outOfBound(numClassesInTrain)){
System.out.println("true labels = "+trueLabel.toStringWithExtLabels(labelTranslator));
System.out.println("predicted labels = "+prediction.toStringWithExtLabels(labelTranslator));
System.out.println("it contains unseen labels");
} else{
System.out.println(IMLGBInspector.analyzeMistake(boosting,featureRow,trueLabel,prediction,labelTranslator,limit));
}
}
}
}
static int getNumClassesInTrain(Config config) throws Exception{
MultiLabelClfDataSet dataSet = loadTrainData(config);
int numClasses = dataSet.getNumClasses();
return numClasses;
}
}
|
package edu.neu.ccs.pyramid.experiment;
import edu.neu.ccs.pyramid.configuration.Config;
import edu.neu.ccs.pyramid.dataset.*;
import edu.neu.ccs.pyramid.eval.Accuracy;
import edu.neu.ccs.pyramid.util.Pair;
import edu.neu.ccs.pyramid.util.Sampling;
import mltk.core.Instances;
import mltk.core.io.InstancesReader;
import mltk.predictor.Learner;
import mltk.predictor.glm.ElasticNetLearner;
import mltk.predictor.glm.GLM;
import org.apache.commons.math3.distribution.LogNormalDistribution;
import java.io.File;
import java.util.stream.IntStream;
public class Exp22 {
public static void main(String[] args) throws Exception{
if (args.length !=1){
throw new IllegalArgumentException("please specify the config file");
}
Config config = new Config(args[0]);
System.out.println(config);
run(config);
}
private static Config genHyperParams(){
Config config = new Config();
double lambda = Sampling.doubleLogUniform(0.001,1);
double l1Ratio = Sampling.doubleUniform(0, 1);
int iterations = Sampling.intUniform(50,200);
config.setDouble("lambda",lambda);
config.setDouble("l1Ratio",l1Ratio);
config.setInt("iterations",iterations);
return config;
}
private static void run(Config config) throws Exception{
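        // random hyper-parameter search: each run draws a fresh (lambda, l1Ratio, iterations) triple,
        // trains an elastic-net GLM on a train/validation split, and reports accuracy on both parts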
for (int run=0;run<config.getInt("numRuns");run++){
Pair<ClfDataSet,ClfDataSet> dataSets = loadDataSets(config);
Instances trainSet = MLTKFormat.toInstances(dataSets.getFirst());
Instances validationSet = MLTKFormat.toInstances(dataSets.getSecond());
Config hyperParams = genHyperParams();
System.out.println("==============================");
System.out.println("hyper parameters for the run:");
System.out.println(hyperParams);
double lambda = hyperParams.getDouble("lambda");
double l1Ratio = hyperParams.getDouble("l1Ratio");
int iterations = hyperParams.getInt("iterations");
ElasticNetLearner learner = new ElasticNetLearner();
learner.setTask(Learner.Task.CLASSIFICATION);
learner.setMaxNumIters(iterations);
learner.setVerbose(false);
learner.setLambda(lambda);
learner.setL1Ratio(l1Ratio);
GLM glm = learner.build(trainSet);
int[] predictions= IntStream.range(0, trainSet.size()).map(i-> glm.classify(trainSet.get(i)))
.toArray();
int[] labels = IntStream.range(0,trainSet.size()).map(i-> (int)(trainSet.get(i).getTarget()))
.toArray();
System.out.println("accuracy on the training set = "+ Accuracy.accuracy(labels, predictions));
int[] validationPredictions= IntStream.range(0,validationSet.size()).map(i -> glm.classify(validationSet.get(i)))
.toArray();
int[] validationLabels = IntStream.range(0,validationSet.size()).map(i-> (int)(validationSet.get(i).getTarget()))
.toArray();
System.out.println("accuracy on the validation set = "+ Accuracy.accuracy(validationLabels,validationPredictions));
}
}
private static Pair<ClfDataSet, ClfDataSet> loadDataSets(Config config) throws Exception{
File trecFile = new File(config.getString("input.folder"),
config.getString("input.trainData"));
ClfDataSet clfDataSet = TRECFormat.loadClfDataSet(trecFile, DataSetType.CLF_SPARSE,false);
return DataSetUtil.splitToTrainValidation(clfDataSet,config.getDouble("trainPercentage"));
}
}
|
package edu.uiowa.icts.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Properties;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dbunit.DBTestCase;
import org.dbunit.PropertiesBasedJdbcDatabaseTester;
import org.dbunit.database.DatabaseConfig;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
import org.junit.After;
import org.junit.Before;
import edu.uiowa.icts.service.PropertyLoader;
public abstract class BaseIctsTestCase extends DBTestCase {
protected final Log log = LogFactory.getLog( getClass().getName() );
protected Properties properties;
private String dataset = null;
private boolean useAltData = false;
private boolean buildSchema = false;
private String schemaSqlFile = null;
public BaseIctsTestCase() {
log.debug( "setting database properties" );
properties = PropertyLoader.loadProperties( "test.properties" );
System.setProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_DRIVER_CLASS, "org.hsqldb.jdbcDriver" );
System.setProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_CONNECTION_URL, "jdbc:hsqldb:mem:testdb" );
System.setProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_USERNAME, "sa" );
System.setProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_PASSWORD, "" );
}
@Before
public void setUp() throws Exception {
log.debug( "=== starting " + getName() + " =============================" );
		// property key assumed: log the configured DBUnit data type factory
		log.debug( getConnection().getConfig().getProperty( DatabaseConfig.PROPERTY_DATATYPE_FACTORY ) );
if ( isBuildSchema() ) {
File f = new File( getSchemaSqlFile() );
Class.forName( System.getProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_DRIVER_CLASS ) );
Properties props = new Properties();
props.setProperty( "user", System.getProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_USERNAME ) );
props.setProperty( "password", System.getProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_PASSWORD ) );
Connection conn = DriverManager.getConnection( System.getProperty( PropertiesBasedJdbcDatabaseTester.DBUNIT_CONNECTION_URL ), props );
assertNotNull( conn );
StringWriter output = new StringWriter();
FileInputStream fis = new FileInputStream( f );
try {
IOUtils.copy( fis, output );
} finally {
fis.close();
}
Statement stmt = conn.createStatement();
stmt.execute( "SET DATABASE SQL SYNTAX PGS TRUE" );
stmt = conn.createStatement();
stmt.executeUpdate( output.toString() );
stmt.close();
}
super.setUp();
}
@After
public void tearDown() throws Exception {
log.debug( "=== ending " + getName() + " =============================\n" );
super.tearDown();
}
@Override
protected IDataSet getDataSet() throws Exception {
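		// load the DBUnit flat XML data set from the classpath; the alternate data set name is only
		// used when a test explicitly requested it via setDataset()/setUseAltData()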
String file_name = getDatasetName();
if ( !isUseAltData() ) {
file_name = properties.getProperty( "dataset.file.name", "dataset.xml" );
}
log.debug( "loading " + file_name + " as data set" );
InputStream in = this.getClass().getClassLoader().getResourceAsStream( file_name );
FlatXmlDataSetBuilder fxdsb = new FlatXmlDataSetBuilder();
fxdsb.setCaseSensitiveTableNames( false );
fxdsb.setColumnSensing( true );
IDataSet datset;
try {
datset = fxdsb.build( in );
} finally {
in.close();
}
return datset;
}
protected void setUpDatabaseConfig( DatabaseConfig config ) {
log.debug( "setting database config" );
config.setProperty( "http:
config.setProperty( DatabaseConfig.FEATURE_QUALIFIED_TABLE_NAMES, true );
config.setProperty( DatabaseConfig.FEATURE_CASE_SENSITIVE_TABLE_NAMES, false );
}
public void setDataset( String dataset ) {
this.dataset = dataset;
setUseAltData( true );
}
public String getDatasetName() {
if ( dataset == null ) {
setDataset( "dataset.xml" );
}
return dataset;
}
public void setUseAltData( boolean useAltData ) {
this.useAltData = useAltData;
}
public boolean isUseAltData() {
return useAltData;
}
public boolean isBuildSchema() {
return buildSchema;
}
public void setBuildSchema( boolean buildSchema ) {
this.buildSchema = buildSchema;
}
public String getSchemaSqlFile() {
return schemaSqlFile;
}
public void setSchemaSqlFile( String schemaSqlFile ) {
this.schemaSqlFile = schemaSqlFile;
}
}
|
package gov.nasa.jpl.mbee;
import gov.nasa.jpl.mbee.actions.ViewViewCommentsAction;
import gov.nasa.jpl.mbee.actions.docgen.GenerateDocumentAction;
import gov.nasa.jpl.mbee.actions.docgen.InstanceViewpointAction;
import gov.nasa.jpl.mbee.actions.docgen.NumberDependencyAction;
import gov.nasa.jpl.mbee.actions.docgen.RunUserScriptAction;
import gov.nasa.jpl.mbee.actions.docgen.RunUserValidationScriptAction;
import gov.nasa.jpl.mbee.actions.docgen.ValidateDocument3Action;
import gov.nasa.jpl.mbee.actions.docgen.ValidateViewStructureAction;
import gov.nasa.jpl.mbee.actions.docgen.ViewDocument3Action;
import gov.nasa.jpl.mbee.actions.ems.ExportModelAction;
import gov.nasa.jpl.mbee.actions.ems.InitializeProjectAction;
import gov.nasa.jpl.mbee.actions.ems.ValidateModelAction;
import gov.nasa.jpl.mbee.actions.ems.ValidateViewAction;
import gov.nasa.jpl.mbee.actions.ems.ValidateViewRecursiveAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ExportViewAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ExportViewCommentsAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ExportViewHierarchyAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ExportViewRecursiveAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ImportViewAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ImportViewCommentsAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ImportViewDryAction;
import gov.nasa.jpl.mbee.actions.vieweditor.ImportViewRecursiveAction;
import gov.nasa.jpl.mbee.actions.vieweditor.SynchronizeViewAction;
import gov.nasa.jpl.mbee.actions.vieweditor.SynchronizeViewRecursiveAction;
import gov.nasa.jpl.mbee.generator.DocumentGenerator;
import gov.nasa.jpl.mbee.lib.MDUtils;
import gov.nasa.jpl.mbee.lib.Utils;
import gov.nasa.jpl.mbee.model.CollectActionsVisitor;
import gov.nasa.jpl.mbee.model.Document;
import gov.nasa.jpl.mbee.model.UserScript;
import gov.nasa.jpl.mbee.viewedit.ViewEditUtils;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.nomagic.actions.ActionsCategory;
import com.nomagic.actions.ActionsManager;
import com.nomagic.actions.NMAction;
import com.nomagic.magicdraw.actions.BrowserContextAMConfigurator;
import com.nomagic.magicdraw.actions.ConfiguratorWithPriority;
import com.nomagic.magicdraw.actions.DiagramContextAMConfigurator;
import com.nomagic.magicdraw.actions.MDAction;
import com.nomagic.magicdraw.actions.MDActionsCategory;
import com.nomagic.magicdraw.core.Project;
import com.nomagic.magicdraw.ui.browser.Node;
import com.nomagic.magicdraw.ui.browser.Tree;
import com.nomagic.magicdraw.uml.symbols.DiagramPresentationElement;
import com.nomagic.magicdraw.uml.symbols.PresentationElement;
import com.nomagic.uml2.ext.jmi.helpers.StereotypesHelper;
import com.nomagic.uml2.ext.magicdraw.activities.mdfundamentalactivities.Activity;
import com.nomagic.uml2.ext.magicdraw.auxiliaryconstructs.mdmodels.Model;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.Element;
import com.nomagic.uml2.ext.magicdraw.classes.mdkernel.NamedElement;
import com.nomagic.uml2.ext.magicdraw.mdprofiles.Stereotype;
public class DocGenConfigurator implements BrowserContextAMConfigurator, DiagramContextAMConfigurator {
private Set<ActionsManager> viewQueryCalled = new HashSet<ActionsManager>();
@Override
public int getPriority() {
return ConfiguratorWithPriority.MEDIUM_PRIORITY;
}
@Override
public void configure(ActionsManager manager, Tree browser) {
Node no = browser.getSelectedNode();
if (no == null)
return;
Object o = no.getUserObject();
if (!(o instanceof Element))
return;
addElementActions(manager, (Element)o);
}
@Override
public void configure(ActionsManager manager, DiagramPresentationElement diagram,
PresentationElement[] selected, PresentationElement requestor) {
if (requestor != null) {
Element e = requestor.getElement();
addElementActions(manager, e);
} else {
addDiagramActions(manager, diagram);
}
}
private void addElementActions(ActionsManager manager, Element e) {
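        // actions are grouped into categories (MMS, View Interaction, DocGen); each action is only
        // added when the ActionsManager does not already contain it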
Project prj = Project.getProject(e);
if (prj == null)
return;
Stereotype sysmlview = Utils.getViewStereotype();
Stereotype sysmlviewpoint = Utils.getViewpointStereotype();
Stereotype documentView = StereotypesHelper.getStereotype(prj, DocGen3Profile.documentViewStereotype,
"Document Profile");
if (e == null)
return;
if (ViewEditUtils.isPasswordSet()) {
if (MDUtils.isDeveloperMode()) {
ActionsCategory modelLoad = myCategory(manager, "AlfrescoModel", "MMS");
if (manager.getActionFor(ExportModelAction.actionid) == null)
modelLoad.addAction(new ExportModelAction(e));
if (e instanceof Model && manager.getActionFor(InitializeProjectAction.actionid) == null)
modelLoad.addAction(new InitializeProjectAction());
if (manager.getActionFor(ValidateModelAction.actionid) == null)
modelLoad.addAction(new ValidateModelAction(e));
} else if (e instanceof Model) {
ActionsCategory modelLoad = myCategory(manager, "AlfrescoModel", "MMS");
if (manager.getActionFor(ValidateModelAction.actionid) == null)
modelLoad.addAction(new ValidateModelAction(e));
}
}
// add menus in reverse order since they are inserted at top
// View Interaction menu
if (StereotypesHelper.hasStereotypeOrDerived(e, DocGen3Profile.validationScriptStereotype)) {
ActionsCategory c = myCategory(manager, "ViewInteraction", "View Interaction");
UserScript us = new UserScript();
us.setDgElement(e);
List<Element> targets = Utils.collectDirectedRelatedElementsByRelationshipStereotypeString(e,
DocGen3Profile.queriesStereotype, 1, false, 1);
targets.addAll(Utils.collectDirectedRelatedElementsByRelationshipStereotypeString(e,
DocGen3Profile.oldQueriesStereotype, 1, false, 1));
us.setTargets(targets);
if (manager.getActionFor(RunUserValidationScriptAction.actionid) == null)
c.addAction(new RunUserValidationScriptAction(us, true));
} else if (StereotypesHelper.hasStereotypeOrDerived(e, DocGen3Profile.userScriptStereotype)) {
ActionsCategory c = myCategory(manager, "ViewInteraction", "View Interaction");
UserScript us = new UserScript();
us.setDgElement(e);
List<Element> targets = Utils.collectDirectedRelatedElementsByRelationshipStereotypeString(e,
DocGen3Profile.queriesStereotype, 1, false, 1);
targets.addAll(Utils.collectDirectedRelatedElementsByRelationshipStereotypeString(e,
DocGen3Profile.oldQueriesStereotype, 1, false, 1));
us.setTargets(targets);
if (manager.getActionFor(RunUserScriptAction.actionid) == null)
c.addAction(new RunUserScriptAction(us, true));
}
if (StereotypesHelper.hasStereotypeOrDerived(e, sysmlview)) {
// There may be no view query actions to add, in which case we need
// to avoid adding an empty menu category, so the category is
// removed in this case.
ActionsCategory category = (ActionsCategory)manager.getActionFor("ViewInteraction");
if (category == null) {
category = new MDActionsCategory("ViewInteraction", "View Interaction");
category.setNested(true);
boolean added = addViewQueryActions(manager, category, (NamedElement)e);
if (added)
manager.addCategory(0, category);
}
if (ViewEditUtils.isPasswordSet()) {
ActionsCategory modelLoad2 = myCategory(manager, "AlfrescoModel", "MMS");
NMAction action = manager.getActionFor(ValidateViewAction.actionid);
if (action == null)
modelLoad2.addAction(new ValidateViewAction(e));
action = manager.getActionFor(ValidateViewRecursiveAction.actionid);
if (action == null)
modelLoad2.addAction(new ValidateViewRecursiveAction(e));
}
//ActionsCategory c = myCategory(manager, "ViewEditor", "View Editor");
//action = manager.getActionFor(ExportViewAction.actionid);
//if (action == null)
//addEditableViewActions(c, (NamedElement)e);
}
/*if (StereotypesHelper.hasStereotype(e, ViewEditorProfile.project)) { // REVIEW
// --
// hasStereotypeOrDerived()?
ActionsCategory c = myCategory(manager, "ViewEditor", "View Editor");
NMAction act = manager.getActionFor(OrganizeViewEditorAction.actionid);
if (act == null)
c.addAction(new OrganizeViewEditorAction(e));
act = manager.getActionFor(DeleteProjectAction.actionid);
if (act == null)
c.addAction(new DeleteProjectAction(e));
act = manager.getActionFor(EMSLogoutAction.actionid);
if (act == null)
c.addAction(new EMSLogoutAction());
}
if (StereotypesHelper.hasStereotype(e, ViewEditorProfile.volume)) { // REVIEW
// --
// hasStereotypeOrDerived()?
ActionsCategory c = myCategory(manager, "ViewEditor", "View Editor");
NMAction act = manager.getActionFor(DeleteVolumeAction.actionid);
if (act == null)
c.addAction(new DeleteVolumeAction(e));
act = manager.getActionFor(EMSLogoutAction.actionid);
if (act == null)
c.addAction(new EMSLogoutAction());
}
if (StereotypesHelper.hasStereotype(e, ViewEditorProfile.document)
|| StereotypesHelper.hasStereotypeOrDerived(e, documentView)) {
ActionsCategory c = myCategory(manager, "ViewEditor", "View Editor");
NMAction act = manager.getActionFor(DeleteDocumentAction.actionid);
if (act == null)
c.addAction(new DeleteDocumentAction(e));
if (StereotypesHelper.hasStereotypeOrDerived(e, documentView)) {
act = manager.getActionFor(OrganizeDocumentAction.actionid);
if (act == null)
c.addAction(new OrganizeDocumentAction(e));
}
}*/
// DocGen menu
if ((e instanceof Activity && StereotypesHelper.hasStereotypeOrDerived(e,
DocGen3Profile.documentStereotype)) || StereotypesHelper.hasStereotypeOrDerived(e, sysmlview)) {
NMAction act = null;
ActionsCategory c = myCategory(manager, "DocGen", "DocGen");
// DefaultPropertyResourceProvider pp = new
// DefaultPropertyResourceProvider();
act = manager.getActionFor(ValidateDocument3Action.actionid);
if (act == null)
c.addAction(new ValidateDocument3Action(e));
act = manager.getActionFor(ValidateViewStructureAction.actionid);
if (act == null)
c.addAction(new ValidateViewStructureAction(e));
act = manager.getActionFor(ViewDocument3Action.actionid);
if (act == null)
c.addAction(new ViewDocument3Action(e));
act = manager.getActionFor(GenerateDocumentAction.actionid);
if (act == null)
c.addAction(new GenerateDocumentAction(e));
if (StereotypesHelper.hasStereotype(e, documentView)) {
/*
* act = manager.getActionFor(PublishDocWebAction.actionid); if
* (act == null) c.addAction(new
* PublishDocWebAction((NamedElement)e));
*/
act = manager.getActionFor(NumberDependencyAction.actionid);
if (act == null)
c.addAction(new NumberDependencyAction(e));
}
/*
* if (e instanceof Activity &&
* StereotypesHelper.hasStereotypeOrDerived(e,
* DocGen3Profile.documentStereotype)) { act =
* manager.getActionFor(PublishDocWebAction.actionid); if (act ==
* null) c.addAction(new PublishDocWebAction((NamedElement)e)); }
*/
}
if (StereotypesHelper.hasStereotypeOrDerived(e, sysmlviewpoint)) {
ActionsCategory c = myCategory(manager, "DocGen", "DocGen");
NMAction act = manager.getActionFor(InstanceViewpointAction.actionid);
if (act == null)
c.addAction(new InstanceViewpointAction(e));
}
// if ( ( e instanceof Activity &&
// StereotypesHelper.hasStereotypeOrDerived( e,
// DocGen3Profile.documentStereotype ) ) ||
// StereotypesHelper.hasStereotypeOrDerived( e, sysmlview ) ) {
// ActionsCategory c = myCategory( manager, "DocGen", "DocGen" );
// NMAction act = manager.getActionFor( "DocGenComments" );
// if ( act == null ) addCommentActions( c, (NamedElement)e );
}
private void addDiagramActions(ActionsManager manager, DiagramPresentationElement diagram) {
if (diagram == null)
return;
Element element = diagram.getActualElement();
if (element == null)
return;
Element owner = element.getOwner();
if (owner == null || !(owner instanceof NamedElement))
return;
// //this add actions for syncing to docweb comments
// if (StereotypesHelper.hasStereotypeOrDerived(owner,
// DocGen3Profile.documentViewStereotype)) {
// ActionsCategory category = myCategory(manager, "DocGen", "DocGen");
// NMAction action = manager.getActionFor("DocGenComments");
// if (action == null)
// addCommentActions(category, (NamedElement) owner);
}
/**
* add actions related to view editor (this includes view comments)
*
* @param parent
* @param e
*/
private void addEditableViewActions(ActionsCategory parent, NamedElement e) {
ActionsCategory c = parent; // new ActionsCategory("EditableView",
// "Editable View");
c.addAction(new ImportViewDryAction(e));
c.addAction(new ExportViewAction(e));
c.addAction(new ExportViewHierarchyAction(e));
c.addAction(new ImportViewAction(e));
c.addAction(new SynchronizeViewAction(e));
c.addAction(new ExportViewCommentsAction(e));
c.addAction(new ImportViewCommentsAction(e));
c.addAction(new ViewViewCommentsAction(e));
ActionsCategory a = new MDActionsCategory("AdvanceEditor", "ModelLoad");
a.setNested(true);
a.addAction(new ImportViewRecursiveAction(e));
a.addAction(new ExportViewRecursiveAction(e));
a.addAction(new SynchronizeViewRecursiveAction(e));
c.addAction(a);
// c.setNested(true);
// synchronized (this) { // saw a concurrency error at some point
}
/**
* Gets the specified category, creates it if necessary.
*
* @param manager
* @param id
* @param name
* @return category with given id/name
*/
private ActionsCategory myCategory(ActionsManager manager, String id, String name) {
ActionsCategory category = (ActionsCategory)manager.getActionFor(id); // getCategory(id);
if (category == null) {
category = new MDActionsCategory(id, name);
category.setNested(true);
manager.addCategory(0, category);
}
return category;
}
    /**
     * This should be used to add actions that are possible when the user right
     * clicks on a view.<br/>
     * It parses the single view and collects any document model that results in a
     * running script, editable table, or validation rule.
     *
     * @param parent
     * @param e
     */
private boolean addViewQueryActions(ActionsManager manager, ActionsCategory parent, NamedElement e) {
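        // guard so that the view query actions are only built once per ActionsManager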
if (viewQueryCalled.contains(manager))
return false;
DocumentGenerator dg = new DocumentGenerator(e, null, null);
Document dge = dg.parseDocument(true, false);
CollectActionsVisitor cav = new CollectActionsVisitor();
dge.accept(cav);
boolean added = false;
if (cav.getActions().size() > 0) {
for (MDAction a: cav.getActions()) {
parent.addAction(a);
}
added = true;
}
parent.setNested(true);
viewQueryCalled.clear();
viewQueryCalled.add(manager);
return added;
}
}
|
package hdm.pk070.jscheme.setup;
import hdm.pk070.jscheme.error.SchemeError;
import hdm.pk070.jscheme.obj.builtin.function.base.SchemeBuiltinEq;
import hdm.pk070.jscheme.obj.builtin.function.list.SchemeBuiltinCons;
import hdm.pk070.jscheme.obj.builtin.function.list.SchemeBuiltinGetCar;
import hdm.pk070.jscheme.obj.builtin.function.list.SchemeBuiltinGetCdr;
import hdm.pk070.jscheme.obj.builtin.function.list.SchemeBuiltinIsCons;
import hdm.pk070.jscheme.obj.builtin.function.math.*;
import hdm.pk070.jscheme.obj.builtin.simple.SchemeSymbol;
import hdm.pk070.jscheme.obj.builtin.syntax.SchemeBuiltinDefine;
import hdm.pk070.jscheme.obj.builtin.syntax.SchemeBuiltinIf;
import hdm.pk070.jscheme.obj.builtin.syntax.SchemeBuiltinLambda;
import hdm.pk070.jscheme.obj.builtin.syntax.SchemeBuiltinQuote;
import hdm.pk070.jscheme.table.environment.GlobalEnvironment;
import hdm.pk070.jscheme.table.environment.entry.EnvironmentEntry;
import hdm.pk070.jscheme.table.symbolTable.SchemeSymbolTable;
/**
* @author patrick.kleindienst
*/
public final class JSchemeSetup {
public static void init() throws SchemeError {
registerBuiltinFunctions();
registerBuiltinSyntax();
printWelcomeScreen();
}
private static void registerBuiltinFunctions() throws SchemeError {
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("+")), SchemeBuiltinPlus.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("-")), SchemeBuiltinMinus.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("*")), SchemeBuiltinTimes.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("/")), SchemeBuiltinDivide.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("abs")), SchemeBuiltinAbsolute.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("cons")), SchemeBuiltinCons.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("car")), SchemeBuiltinGetCar.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("cdr")), SchemeBuiltinGetCdr.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new SchemeSymbol
("cons?")), SchemeBuiltinIsCons.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new
SchemeSymbol("eq?")), SchemeBuiltinEq.create()));
}
private static void registerBuiltinSyntax() throws SchemeError {
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new
SchemeSymbol("define")), SchemeBuiltinDefine.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new
SchemeSymbol("if")), SchemeBuiltinIf.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new
SchemeSymbol("lambda")), SchemeBuiltinLambda.create()));
GlobalEnvironment.getInstance().add(EnvironmentEntry.create(SchemeSymbolTable.getInstance().add(new
SchemeSymbol("quote")), SchemeBuiltinQuote.create()));
}
private static void printWelcomeScreen() {
System.out.println();
System.out.println(" *****************************************");
System.out.println("
System.out.println("
System.out.println("
System.out.println("
System.out.println("
System.out.println(" *****************************************");
System.out.println();
}
}
|
package in.twizmwaz.cardinal.util;
import org.bukkit.ChatColor;
import org.bukkit.Material;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.PotionMeta;
import org.bukkit.potion.Potion;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import org.jdom2.Element;
import java.util.ArrayList;
public class ParseUtils {
public static ItemStack getItem(Element element) {
int amount = 1;
if (element.getAttributeValue("amount") != null) {
try {
amount = Integer.parseInt(element.getAttributeValue("amount"));
            } catch (NumberFormatException e) {
                // keep the default amount of 1 when the attribute is not a valid integer
            }
}
ItemStack itemStack = new ItemStack(Material.matchMaterial(element.getText()), amount);
if (element.getAttributeValue("damage") != null) {
itemStack.setDurability(Short.parseShort(element.getAttributeValue("damage")));
}
try {
for (String raw : element.getAttributeValue("enchantment").split(";")) {
String[] enchant = raw.split(":");
try {
itemStack.addUnsafeEnchantment(in.parapengu.commons.utils.StringUtils.convertStringToEnchantment(enchant[0]), Integer.parseInt(enchant[1]));
} catch (ArrayIndexOutOfBoundsException e) {
itemStack.addUnsafeEnchantment(in.parapengu.commons.utils.StringUtils.convertStringToEnchantment(enchant[0]), 1);
}
}
        } catch (NullPointerException e) {
            // no "enchantment" attribute on this element; leave the item unenchanted
        }
ItemMeta meta = itemStack.getItemMeta();
if (element.getAttributeValue("name") != null) {
meta.setDisplayName(ChatColor.translateAlternateColorCodes('`', element.getAttributeValue("name")));
}
if (element.getAttributeValue("lore") != null) {
ArrayList<String> lore = new ArrayList<>();
for (String raw : element.getAttributeValue("lore").split("\\|")) {
String colored = ChatColor.translateAlternateColorCodes('`', raw);
lore.add(colored);
}
meta.setLore(lore);
}
        int slot = element.getAttributeValue("slot") != null ? Integer.parseInt(element.getAttributeValue("slot")) : -1; // note: slot is parsed but not used within this method
if (element.getAttributeValue("potions") != null) {
String potions = element.getAttributeValue("potions");
if (potions.contains(";")) {
for (String potion : potions.split(";")) {
String[] parse = potion.split(":");
PotionEffect effect = new PotionEffect(PotionEffectType.getByName(parse[0].toUpperCase().replaceAll(" ", "_")), Integer.parseInt(parse[1]) * 20, Integer.parseInt(parse[2]) - 1);
((PotionMeta) meta).addCustomEffect(effect, true);
}
} else {
String[] parse = potions.split(":");
PotionEffect effect = new PotionEffect(PotionEffectType.getByName(parse[0].toUpperCase().replaceAll(" ", "_")), Integer.parseInt(parse[1]) * 20, Integer.parseInt(parse[2]) - 1);
((PotionMeta) meta).addCustomEffect(effect, true);
}
}
itemStack.setItemMeta(meta);
return itemStack;
}
}
|
package io.openshift.booster;
import io.vertx.core.AbstractVerticle;
import io.vertx.core.Future;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import io.vertx.ext.web.handler.StaticHandler;
import static io.vertx.core.http.HttpHeaders.CONTENT_TYPE;
public class HttpApplication extends AbstractVerticle {
protected static final String template = "Hello from Brazil, %s!";
@Override
public void start(Future<Void> future) {
// Create a router object.
Router router = Router.router(vertx);
router.get("/api/greeting").handler(this::greeting);
|
package it.near.sdk.Geopolis;
import android.app.Application;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.net.Uri;
import org.altbeacon.beacon.Region;
import org.json.JSONException;
import org.json.JSONObject;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import cz.msebera.android.httpclient.Header;
import cz.msebera.android.httpclient.auth.AuthenticationException;
import it.near.sdk.Communication.NearJsonHttpResponseHandler;
import it.near.sdk.Geopolis.Beacons.AltBeaconMonitor;
import it.near.sdk.Geopolis.GeoFences.GeoFenceMonitor;
import it.near.sdk.Communication.Constants;
import it.near.sdk.Communication.NearAsyncHttpClient;
import it.near.sdk.Communication.NearNetworkUtil;
import it.near.sdk.Geopolis.GeoFences.GeoFenceSystemEventsReceiver;
import it.near.sdk.Geopolis.Beacons.Ranging.ProximityListener;
import it.near.sdk.GlobalConfig;
import it.near.sdk.MorpheusNear.Morpheus;
import it.near.sdk.Recipes.RecipesManager;
import it.near.sdk.Trackings.Events;
import it.near.sdk.Utils.NearJsonAPIUtils;
import it.near.sdk.Utils.ULog;
/**
 * Manages a beacon forest, the plugin for monitoring regions structured in a tree.
 * This region structure was introduced to enable background monitoring of more than 20 regions on iOS.
 * In this plugin every region is specified by ProximityUUID, minor and major; in the current implementation every region is a beacon.
 * The AltBeacon altBeaconMonitor is initialized with these settings:
 * - the background period between scans is 8 seconds;
 * - the background length of a scan is 1 second;
 * - a waiting period is applied before finalizing a region exit.
 *
 * In our current plugin representation:
 * - this is a "pulse" plugin
 * - the plugin name is: beacon-forest
 * - the only supported action is: enter_region
 * - the bundle is the id of a region
 *
 * @author cattaneostefano
 */
public class GeopolisManager {
public static final String BEACON_FOREST_PATH = "beacon-forest";
public static final String BEACON_FOREST_TRACKINGS = "trackings";
public static final String BEACON_FOREST_BEACONS = "beacons";
private static final String TAG = "GeopolisManager";
private static final String PREFS_SUFFIX = "GeopolisManager";
private static final String PLUGIN_NAME = "geopolis";
private static final String RADAR_ON = "radar_on";
private static final String GEOPOLIS_CONFIG = "cached_config";
public static final String GF_ENTRY_ACTION_SUFFIX = "REGION_ENTRY";
public static final String GF_EXIT_ACTION_SUFFIX = "REGION_EXIT";
public static final String BT_ENTRY_ACTION_SUFFIX = "BT_REGION_ENTRY";
public static final String BT_EXIT_ACTION_SUFFIX = "BT_REGION_EXIT";
public static final String GF_RANGE_FAR_SUFFIX = "RANGE_FAR";
public static final String GF_RANGE_NEAR_SUFFIX = "RANGE_NEAR";
public static final String GF_RANGE_IMMEDIATE_SUFFIX = "RANGE_IMMEDIATE";
public static final String NODE_ID = "identifier";
private final RecipesManager recipesManager;
private final SharedPreferences sp;
private final SharedPreferences.Editor editor;
private List<Region> regionList;
private Application mApplication;
private Morpheus morpheus;
private AltBeaconMonitor altBeaconMonitor;
private final GeoFenceMonitor geofenceMonitor;
private NodesManager nodesManager;
private NearAsyncHttpClient httpClient;
private List<ProximityListener> proximityListeners = new ArrayList<>();
public GeopolisManager(Application application, RecipesManager recipesManager) {
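        // wire up the beacon and geofence monitors, register the region and reset broadcast receivers,
        // prepare the plugin shared preferences and trigger an initial configuration refresh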
this.mApplication = application;
this.recipesManager = recipesManager;
this.nodesManager = new NodesManager(application);
this.altBeaconMonitor = new AltBeaconMonitor(application, nodesManager);
this.geofenceMonitor = new GeoFenceMonitor(application);
registerProximityReceiver();
registerResetReceiver();
String PACK_NAME = mApplication.getApplicationContext().getPackageName();
String PREFS_NAME = PACK_NAME + PREFS_SUFFIX;
sp = mApplication.getSharedPreferences(PREFS_NAME, 0);
editor = sp.edit();
httpClient = new NearAsyncHttpClient();
refreshConfig();
}
private void registerProximityReceiver() {
IntentFilter regionFilter = new IntentFilter();
String packageName = mApplication.getPackageName();
regionFilter.addAction(packageName + "." + GF_ENTRY_ACTION_SUFFIX);
regionFilter.addAction(packageName + "." + GF_EXIT_ACTION_SUFFIX);
regionFilter.addAction(packageName + "." + BT_ENTRY_ACTION_SUFFIX);
regionFilter.addAction(packageName + "." + BT_EXIT_ACTION_SUFFIX);
regionFilter.addAction(packageName + "." + GF_RANGE_FAR_SUFFIX);
regionFilter.addAction(packageName + "." + GF_RANGE_NEAR_SUFFIX);
regionFilter.addAction(packageName + "." + GF_RANGE_IMMEDIATE_SUFFIX);
mApplication.registerReceiver(regionEventsReceiver, regionFilter);
}
private void registerResetReceiver() {
IntentFilter resetFilter = new IntentFilter();
String packageName = mApplication.getPackageName();
resetFilter.addAction(packageName + "." + GeoFenceSystemEventsReceiver.RESET_MONITOR_ACTION_SUFFIX);
mApplication.registerReceiver(resetEventReceiver, resetFilter);
}
    /**
     * Refreshes the configuration of the component. The list of nodes to monitor is downloaded from the APIs.
     * If refreshing the configuration fails, a cached version is used instead.
     */
public void refreshConfig(){
        // build the nodes endpoint URL, requesting the full children hierarchy for this app
        Uri url = Uri.parse(Constants.API.PLUGINS_ROOT).buildUpon()
                .appendPath("geopolis")
                .appendPath("nodes")
                .appendQueryParameter("filter[app_id]", GlobalConfig.getInstance(mApplication).getAppId())
                .appendQueryParameter("include", "children.*.children")
                .build();
try {
httpClient.nearGet(mApplication, url.toString(), new NearJsonHttpResponseHandler(){
@Override
public void onSuccess(int statusCode, Header[] headers, JSONObject response) {
ULog.d(TAG, response.toString());
List<Node> nodes = nodesManager.parseAndSetNodes(response);
startRadarOnNodes(nodes);
}
@Override
public void onFailureUnique(int statusCode, Header[] headers, Throwable throwable, String responseString) {
ULog.d(TAG, "Error " + statusCode);
// load the config
startRadarOnNodes(nodesManager.getNodes());
}
});
} catch (AuthenticationException e) {
e.printStackTrace();
}
}
public void startRadarOnNodes(List<Node> nodes) {
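        // (re)configure geofence monitoring from the node list; beacon monitoring is currently disabled here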
if (nodes == null) return;
geofenceMonitor.setUpMonitor(GeoFenceMonitor.filterGeofence(nodes));
// altBeaconMonitor.setUpMonitor(nodes);
}
public void startRadar(){
if (isRadarStarted(mApplication)) return;
setRadarState(true);
List<Node> nodes = nodesManager.getNodes();
// altBeaconMonitor.setUpMonitor(nodes);
geofenceMonitor.setUpMonitor(GeoFenceMonitor.filterGeofence(nodes));
geofenceMonitor.startGFRadar();
}
public void stopRadar(){
setRadarState(false);
altBeaconMonitor.stopRadar();
geofenceMonitor.stopGFRadar();
}
    /**
     * Notifies the recipes manager of the occurrence of a registered pulse.
     * @param pulseAction the action of the pulse to notify
     * @param pulseBundle the region identifier of the pulse
     */
private void firePulse(String pulseAction, String pulseBundle) {
ULog.d(TAG, "firePulse!");
recipesManager.gotPulse(PLUGIN_NAME, pulseAction, pulseBundle);
}
BroadcastReceiver regionEventsReceiver = new BroadcastReceiver() {
public static final String TAG = "RegionEventReceiver";
@Override
public void onReceive(Context context, Intent intent) {
ULog.wtf(this.TAG, "receiverEvent");
if (!intent.hasExtra(NODE_ID)) return;
// trim the package name
String packageName = mApplication.getPackageName();
String action = intent.getAction().replace(packageName + ".", "");
Node node = nodesManager.nodeFromId(intent.getStringExtra(NODE_ID));
switch (action){
case GF_ENTRY_ACTION_SUFFIX:
if (node == null) return;
trackAndFirePulse(node, Events.ENTER_PLACE);
if (node.getChildren() != null){
geofenceMonitor.setUpMonitor(GeoFenceMonitor.geofencesOnEnter(nodesManager.getNodes(), node));
altBeaconMonitor.addRegions(node.getChildren());
}
break;
case GF_EXIT_ACTION_SUFFIX:
if (node == null) return;
trackAndFirePulse(node, Events.LEAVE_PLACE);
geofenceMonitor.setUpMonitor(GeoFenceMonitor.geofencesOnExit(nodesManager.getNodes(), node));
altBeaconMonitor.removeRegions(node.getChildren());
break;
case BT_ENTRY_ACTION_SUFFIX:
if (node == null) return;
trackAndFirePulse(node, Events.ENTER_REGION);
break;
case BT_EXIT_ACTION_SUFFIX:
if (node == null) return;
trackAndFirePulse(node, Events.LEAVE_REGION);
break;
case GF_RANGE_FAR_SUFFIX:
trackAndFirePulse(node, Events.RANGE_FAR);
break;
case GF_RANGE_NEAR_SUFFIX:
trackAndFirePulse(node, Events.RANGE_NEAR);
break;
case GF_RANGE_IMMEDIATE_SUFFIX:
trackAndFirePulse(node, Events.RANGE_IMMEDIATE);
break;
}
}
};
    /**
     * Tracks the geographical interaction and fires the proper pulse. It does nothing if the node or its identifier is null.
     * @param node the node the event refers to
     * @param event the tracking event name
     */
private void trackAndFirePulse(Node node, String event) {
        if (node != null && node.getIdentifier() != null){
trackEvent(node.getIdentifier(), event);
firePulse(event, node.getIdentifier());
}
}
private BroadcastReceiver resetEventReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
ULog.wtf(TAG, "reset intent received");
if (intent.getBooleanExtra(GeoFenceSystemEventsReceiver.LOCATION_STATUS, false)){
startRadarOnNodes(nodesManager.getNodes());
} else {
altBeaconMonitor.stopRadar();
geofenceMonitor.stopGFRadar();
}
}
};
    /**
     * Sends tracking data to the beacon forest APIs about a region event (every beacon is a region).
     */
private void trackEvent(String identifier, String event) {
try {
Uri url = Uri.parse(Constants.API.PLUGINS_ROOT).buildUpon()
.appendPath(BEACON_FOREST_PATH)
.appendPath(BEACON_FOREST_TRACKINGS).build();
NearNetworkUtil.sendTrack(mApplication, url.toString(), buildTrackBody(identifier, event));
} catch (JSONException e) {
ULog.d(TAG, "Unable to send track: " + e.toString());
}
}
/**
* Compute the HTTP request body from the region identifier in jsonAPI format.
* @param identifier the node identifier
* @param event the event
* @return the correctly formed body
* @throws JSONException
*/
private String buildTrackBody(String identifier, String event) throws JSONException {
HashMap<String, Object> map = new HashMap<>();
map.put("identifier" , identifier);
map.put("event", event);
DateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
Date now = new Date(System.currentTimeMillis());
String formatted = sdf.format(now);
map.put("tracked_at", formatted);
map.put("profile_id", GlobalConfig.getInstance(mApplication).getProfileId());
map.put("installation_id", GlobalConfig.getInstance(mApplication).getInstallationId());
return NearJsonAPIUtils.toJsonAPI("trackings", map);
}
    /**
     * Returns whether the app has started the location radar.
     * @param context the application context
     * @return true if the radar is on
     */
public static boolean isRadarStarted(Context context){
String PACK_NAME = context.getApplicationContext().getPackageName();
SharedPreferences sp = context.getSharedPreferences(PACK_NAME + PREFS_SUFFIX, 0);
return sp.getBoolean(RADAR_ON, false);
}
public void setRadarState(boolean b){
String PACK_NAME = mApplication.getApplicationContext().getPackageName();
SharedPreferences.Editor edit = mApplication.getSharedPreferences(PACK_NAME + PREFS_SUFFIX, 0).edit();
edit.putBoolean(RADAR_ON, b).apply();
}
}
|
package org.rtevo.genetics;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.rtevo.simulation.Result;
import org.rtevo.util.RandUtil;
/**
* @author Jan Corazza & Luka Bubalo
*
*/
public class ChromosomeFactory {
// percentage of the generation on the bottom that will be excluded from the
// next generation
private static double cutoff = 20;
public static List<Chromosome> random(int n) {
ArrayList<Chromosome> chromosomes = new ArrayList<Chromosome>();
for (int i = 0; i < n; ++i) {
chromosomes.add(Chromosome.random());
}
return chromosomes;
}
public static List<Chromosome> evolve(List<Result> results) {
// 1. sort the array based on metersPassed
// 2. remove the below ChromosomeFactory.cutoff percent
// 3. select ChromosomeFactory.cutoff percent from the remaining
// chromosomes based on their metersPassed
// 4. mutate the selected chromosomes and add the mutations to the list
int originalSize = results.size();
int remove = (int) (cutoff / 100.0 * originalSize);
List<Result> myResults = new ArrayList<Result>(results);
// best results first
Collections.sort(myResults);
// remove the worst cutoff percent
if (originalSize > remove) {
// no need to copy since no modification is done
myResults = myResults.subList(0, originalSize - remove);
}
List<Chromosome> chromosomes = new ArrayList<Chromosome>();
for (Result result : myResults) {
chromosomes.add(result.chromosome);
}
while (chromosomes.size() != originalSize) {
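            // refill the population back to its original size by repeatedly mutating a
            // randomly sized slice of the best surviving results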
int outreach = RandUtil.random(1, myResults.size());
int need = originalSize - chromosomes.size();
if (outreach > need) {
outreach = need;
}
List<Result> toAdd = myResults.subList(0, outreach);
for (Result result : toAdd) {
chromosomes.add(mutate(result.chromosome));
}
}
return chromosomes;
}
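    /*
     * Illustrative usage sketch (added; not part of the original API). It shows one way a
     * caller might drive a generation step: seed with random chromosomes when there are no
     * simulation results yet, otherwise evolve from the measured results. Only methods
     * already defined in this class are used; the surrounding simulation is hypothetical.
     */
    public static List<Chromosome> exampleGenerationStep(int populationSize, List<Result> simulationResults) {
        if (simulationResults == null || simulationResults.isEmpty()) {
            // first generation: start from random chromosomes
            return random(populationSize);
        }
        // later generations: sort, cull and mutate based on measured results
        return evolve(simulationResults);
    }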
/**
* Returns a mutated chromosome.
*
* @param chromosome
* Chromosome object to mutate
     * @return a new Chromosome mutated from the first parameter; the original is left unchanged
*/
public static Chromosome mutate(Chromosome chromosome) {
//making new chromosome which will be mutated
Chromosome mutated = new Chromosome();
for( PartJoint i : chromosome.partJoints ) {
PartJoint toMutateJ = new PartJoint();
Part toMutateOneP = new Part();
Part toMutateTwoP = new Part();
toMutateJ.rotateFrom = i.rotateFrom;
toMutateJ.rotateTo = i.rotateTo;
toMutateJ.angularVelocity = i.angularVelocity;
toMutateJ.percentOne = i.percentOne;
toMutateJ.percentTwo = i.percentTwo;
toMutateOneP.height = i.partOne.height;
toMutateOneP.width = i.partOne.width;
toMutateTwoP.height = i.partTwo.height;
toMutateTwoP.width = i.partTwo.width;
mutated.parts.add(toMutateOneP);
mutated.parts.add(toMutateTwoP);
mutated.partJoints.add(toMutateJ);
}
for( Part i: chromosome.parts) {
Part toMutateP = new Part();
toMutateP.width = i.width;
toMutateP.height = i.height;
mutated.parts.add(toMutateP);
}
//iterating through partJoints arraylist
for( PartJoint i : mutated.partJoints ) {
//mutating rotateFrom
if( RandUtil.random(0f, 1f) < 0.2f ) {
float rotateFromToMutate = RandUtil.random(-0.05f, 0.05f);
if( i.rotateFrom + rotateFromToMutate > 0 ) { i.rotateFrom += rotateFromToMutate; }
}
//mutating rotateTo
if( RandUtil.random(0f, 1f) < 0.2f ) {
float rotateToToMutate = RandUtil.random(-0.05f, 0.05f);
if( i.rotateTo + rotateToToMutate > 0 ) { i.rotateTo += rotateToToMutate; }
}
//mutating percentOne
if( RandUtil.random(0f, 1f) < 0.2f ) {
float percentOneToMutate = RandUtil.random(-0.05f, 0.05f);
if( i.percentOne + percentOneToMutate > 0 && i.percentOne + percentOneToMutate < 1 ) {
i.percentOne += percentOneToMutate;
}
}
//mutating percentTwo
if( RandUtil.random(0f, 1f) < 0.2f ) {
float percentTwoToMutate = RandUtil.random(-0.05f, 0.05f);
if( i.percentTwo + percentTwoToMutate > 0 && i.percentTwo + percentTwoToMutate < 1 ) {
i.percentTwo += percentTwoToMutate;
}
}
//mutating angularVelocity
if( RandUtil.random(0f, 1f) < 0.2f ) {
float angularVelocityToMutate = RandUtil.random(-0.05f, 0.05f);
i.angularVelocity += angularVelocityToMutate;
}
}
for ( Part i : mutated.parts ) {
// mutating width
if( RandUtil.random(0f, 1f) < 0.2f ) {
float widthToMutate = RandUtil.random(-0.02f, 0.02f);
if( i.width + widthToMutate < 2f && i.width + widthToMutate > 0.5f ) {
i.width += widthToMutate;
}
}
//mutating height
if( RandUtil.random(0f, 1f) < 0.2f ) {
float heightToMutate = RandUtil.random(-0.02f, 0.02f);
if( i.height + heightToMutate < 2f && i.height + heightToMutate > 0.5f ) {
i.height += heightToMutate;
}
}
}
return mutated;
}
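    /*
     * Note (added for documentation): in mutate() above, each copied attribute is perturbed
     * independently with ~20% probability by a small uniform random amount; most attributes
     * only change when the perturbed value stays in its allowed range (e.g. part sizes in
     * (0.5, 2.0)), while angularVelocity is perturbed unconditionally.
     */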
public static Chromosome crossover(Chromosome chromofirst, Chromosome chromosecond) {
return null;
}
}
|
package fterms;
import java.util.LinkedList;
import java.util.List;
import fterms.exceptions.FeatureTermException;
import java.io.File;
import java.io.FileWriter;
import java.util.HashMap;
import java.util.Random;
import java.util.Set;
import util.Pair;
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/**
* This class implements the disintegration operation, which breaks a feature term into a set of properties as
* described in:
* - "On Similarity Measures based on a Refinement Lattice" by Santiago Ontanon and Enric Plaza (ICCBR 2009)
* - "Similarity over Refinement Graphs" by Santiago Ontanon and Enric Plaza (Machine Learning Journal, submitted)
* @author santi
*/
public class Disintegration {
public static int DEBUG = 0;
    public static int s_reminderType = 1; // 0: unification-based (always correct, but slow)
                                          // 1: smart (tries the fast method, verifies the result, and falls back to unification-based on failure)
                                          // 2: fast (fast, but might not work when breaking variable equalities)
public static HashMap<FeatureTerm,List<FeatureTerm>> propertiesAllTable = new HashMap<FeatureTerm,List<FeatureTerm>>();
public static HashMap<FeatureTerm,List<FeatureTerm>> propertiesFormalTable = new HashMap<FeatureTerm,List<FeatureTerm>>();
public static HashMap<FeatureTerm,List<FeatureTerm>> propertiesFastTable = new HashMap<FeatureTerm,List<FeatureTerm>>();
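    // Illustrative note (added): the remainder strategy can be switched globally before
    // disintegrating, e.g. "Disintegration.s_reminderType = 2;" selects the fast method only.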
public static List<FeatureTerm> disintegrate(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
List<FeatureTerm> properties = null;
Pair<FeatureTerm, FeatureTerm> property_rest;
// First check cache:
properties = propertiesFormalTable.get(f);
if (properties!=null) return properties;
properties = new LinkedList<FeatureTerm>();
FeatureTerm unnamed = f.clone(dm, o);
List<FeatureTerm> variables = FTRefinement.variables(unnamed);
for(FeatureTerm v:variables) {
if (!dm.contains(v)) v.setName(null);
}
if (DEBUG>=2) {
System.out.println("Unnamed term to disintegrate:");
System.out.println(unnamed.toStringNOOS(dm));
}
do {
property_rest = extractProperty(unnamed, dm, o, s_reminderType);
if (property_rest != null) {
if (property_rest.m_a!=null) properties.add(property_rest.m_a);
unnamed = property_rest.m_b;
System.out.println(properties.size() + " properties (term now has " + FTRefinement.variables(unnamed).size() + " variables)");
// System.out.println(property_rest.m_a.toStringNOOS(dm));
// System.out.println(f.toStringNOOS(dm));
}
} while (property_rest != null);
// System.out.println(unnamed.toStringNOOS(dm));
propertiesFormalTable.put(f,properties);
return properties;
}
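    /*
     * Illustrative usage sketch (added; not part of the original API). The caller supplies the
     * term, domain model and ontology; only methods already defined in this file are used.
     */
    public static void exampleDisintegrate(FeatureTerm term, FTKBase dm, Ontology o) throws FeatureTermException {
        // break the term into its set of properties (results are cached per term)
        List<FeatureTerm> properties = disintegrate(term, dm, o);
        for (FeatureTerm property : properties) {
            System.out.println(property.toStringNOOS(dm));
        }
    }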
public static List<FeatureTerm> disintegrateAllRemainders(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
List<FeatureTerm> properties = null;
Pair<List<FeatureTerm>, FeatureTerm> property_rest;
// First check cache:
properties = propertiesAllTable.get(f);
if (properties!=null) return properties;
properties = new LinkedList<FeatureTerm>();
FeatureTerm unnamed = f.clone(dm, o);
List<FeatureTerm> variables = FTRefinement.variables(unnamed);
for(FeatureTerm v:variables) {
if (!dm.contains(v)) v.setName(null);
}
if (DEBUG>=2) {
System.out.println("Unnamed term to disintegrate:");
System.out.println(unnamed.toStringNOOS(dm));
}
do {
property_rest = extractAllProperties(unnamed, dm, o);
if (property_rest != null) {
if (property_rest.m_a!=null) properties.addAll(property_rest.m_a);
unnamed = property_rest.m_b;
                System.out.println(properties.size() + " properties (term now has " + FTRefinement.variables(unnamed).size() + " variables)");
// System.out.println(property_rest.m_a.toStringNOOS(dm));
// System.out.println(f.toStringNOOS(dm));
}
} while (property_rest != null);
// System.out.println(unnamed.toStringNOOS(dm));
propertiesAllTable.put(f,properties);
return properties;
}
public static List<Pair<FeatureTerm,FeatureTerm>> disintegrateWithTrace(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
List<Pair<FeatureTerm,FeatureTerm>> trace = null;
Pair<FeatureTerm, FeatureTerm> property_rest;
trace = new LinkedList<Pair<FeatureTerm,FeatureTerm>>();
FeatureTerm unnamed = f.clone(dm, o);
List<FeatureTerm> variables = FTRefinement.variables(unnamed);
for(FeatureTerm v:variables) {
if (!dm.contains(v)) v.setName(null);
}
if (DEBUG>=2) {
System.out.println("Unnamed term to disintegrate:");
System.out.println(unnamed.toStringNOOS(dm));
}
do {
property_rest = extractProperty(unnamed, dm, o, s_reminderType);
if (property_rest != null) {
if (property_rest.m_a!=null) trace.add(property_rest);
unnamed = property_rest.m_b;
                System.out.println(trace.size() + " properties (term now has " + FTRefinement.variables(unnamed).size() + " variables)");
}
} while (property_rest != null);
return trace;
}
public static List<FeatureTerm> disintegrate(FeatureTerm object, FTKBase dm, Ontology o, boolean cache, boolean fast) throws Exception {
long start_time = System.currentTimeMillis();
List<FeatureTerm> properties_tmp = null;
properties_tmp = propertiesFormalTable.get(object);
if (properties_tmp!=null) return properties_tmp;
if (object.getName()!=null && cache) {
String fname;
String fname_state;
FeatureTerm current_state = null;
if (fast) fname = "disintegration-cache/fast-"+object.getName();
else fname = "disintegration-cache/formal-" + object.getName();
if (fast) fname_state = "disintegration-cache/fast-"+object.getName()+"-state";
else fname_state = "disintegration-cache/formal-" + object.getName()+"-state";
File tmp_state = new File(fname_state);
File tmp = new File(fname);
// set up the current state:
if (tmp_state.exists() && tmp.exists()) {
// disintegration was abandoned in the middle
// load properties
FTKBase tmpBase = new FTKBase();
tmpBase.uses(dm);
tmpBase.ImportNOOS(fname, o);
properties_tmp = new LinkedList<FeatureTerm>();
properties_tmp.addAll(tmpBase.getAllTerms());
// load the last state:
FTKBase tmpBase_state = new FTKBase();
tmpBase_state.uses(dm);
tmpBase_state.ImportNOOS(fname_state, o);
current_state = tmpBase_state.getAllTerms().get(0);
System.out.println(properties_tmp.size() + " properties were already extracted. Continuing...");
} else {
// disintegration didn't start or it's complete:
if (tmp.exists()) {
// load properties
FTKBase tmpBase = new FTKBase();
tmpBase.uses(dm);
tmpBase.ImportNOOS(fname, o);
properties_tmp = new LinkedList<FeatureTerm>();
properties_tmp.addAll(tmpBase.getAllTerms());
if (DEBUG>=1) System.out.println(properties_tmp.size() + " properties were already extracted. Complete.");
} else {
properties_tmp = new LinkedList<FeatureTerm>();
current_state = object.clone(dm, o);
List<FeatureTerm> variables = FTRefinement.variables(current_state);
for(FeatureTerm v:variables) {
if (!dm.contains(v)) v.setName(null);
}
System.out.println("Disintegrating from scratch...");
}
}
while(current_state!=null) {
// extract a property:
Pair<FeatureTerm, FeatureTerm> property_rest;
if (fast) {
property_rest = Disintegration.extractPropertyFast(current_state, dm, o);
} else {
property_rest = Disintegration.extractProperty(current_state, dm, o, s_reminderType);
}
if (property_rest!=null) {
current_state = property_rest.m_b;
properties_tmp.add(property_rest.m_a);
System.out.println(properties_tmp.size() + " properties (term now has " + FTRefinement.variables(current_state).size() + " variables)");
// save the property
{
FileWriter fw = new FileWriter(fname,true);
fw.write(property_rest.m_a.toStringNOOS(dm)+"\n");
fw.flush();
fw.close();
}
// save the state
if (current_state!=null) {
FileWriter fw = new FileWriter(fname_state);
fw.write(current_state.toStringNOOS(dm)+"\n");
fw.flush();
fw.close();
} else {
tmp_state.delete();
}
} else {
current_state = null;
tmp_state.delete();
}
}
propertiesFormalTable.put(object,properties_tmp);
} else {
if (fast) properties_tmp = Disintegration.disintegrateFast(object, dm, o);
else properties_tmp = Disintegration.disintegrate(object, dm, o);
}
long disintegration_time = System.currentTimeMillis();
if (DEBUG>=1) System.out.println("Disintegration time: " + (disintegration_time-start_time));
return properties_tmp;
}
/*
     * This method is like the one below, but follows the exact formulation used in our journal paper
*/
public static Pair<FeatureTerm, FeatureTerm> extractProperty(FeatureTerm f, FTKBase dm, Ontology o, int reminderType) throws FeatureTermException {
if (DEBUG>=1) System.out.println("extractPropertyFormal started...");
if (DEBUG>=2) {
System.out.println("Original term:");
System.out.println(f.toStringNOOS(dm));
}
List<FeatureTerm> refinements = FTRefinement.getSomeGeneralizationsAggressive(f, dm, o);
if (refinements.size() > 0) {
FeatureTerm refinement = refinements.get(0);
Pair<FeatureTerm, FeatureTerm> tmp = null;
switch(reminderType) {
case 0:
tmp = new Pair<FeatureTerm, FeatureTerm>(remainderUnification(f, refinement, dm, o), refinement);
break;
case 1:
tmp = new Pair<FeatureTerm, FeatureTerm>(remainderSmart(f, refinement, dm, o), refinement);
break;
default:
tmp = new Pair<FeatureTerm, FeatureTerm>(remainderFaster(f, refinement, dm, o), refinement);
break;
}
if (DEBUG>=2) {
System.out.println("Property:");
System.out.println(tmp.m_a.toStringNOOS(dm));
}
if (DEBUG>=1) System.out.println("extractPropertyFormal finished...");
return tmp;
} else {
if (DEBUG>=1) System.out.println("extractPropertyFormal finished... (null)");
return null;
}
}
/*
     * This method is like the one below, but follows the exact formulation used in our journal paper
*/
public static Pair<List<FeatureTerm>, FeatureTerm> extractAllProperties(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
if (DEBUG>=1) System.out.println("extractAllProperties started...");
if (DEBUG>=2) {
System.out.println("Original term:");
System.out.println(f.toStringNOOS(dm));
}
List<FeatureTerm> refinements = FTRefinement.getSomeGeneralizationsAggressive(f, dm, o);
if (refinements.size() > 0) {
FeatureTerm refinement = refinements.get(0);
Pair<List<FeatureTerm>, FeatureTerm> tmp = null;
tmp = new Pair<List<FeatureTerm>, FeatureTerm>(allRemaindersUnification(f, refinement, dm, o), refinement);
if (DEBUG>=2) {
for(FeatureTerm p:tmp.m_a) {
System.out.println("Property:");
System.out.println(p.toStringNOOS(dm));
}
}
if (DEBUG>=1) System.out.println("extractPropertyFormal finished...");
return tmp;
} else {
if (DEBUG>=1) System.out.println("extractPropertyFormal finished... (null)");
return null;
}
}
/*
* This method computes the remainder by using the "unification" method. It returns the correct result, but it's very slow.
*/
public static FeatureTerm remainderUnification(FeatureTerm f, FeatureTerm refinement, FTKBase dm, Ontology o) throws FeatureTermException {
FeatureTerm oldRemainder = null;
FeatureTerm remainder = f;
do {
oldRemainder = remainder;
if (DEBUG>=2) System.out.println("remainder: cycle starts");
if (DEBUG>=3) {
System.out.println("refinement: ");
System.out.println(remainder.toStringNOOS(dm));
}
List<FeatureTerm> refinements = FTRefinement.getGeneralizationsAggressive(remainder, dm, o);
if (DEBUG>=3) System.out.println("remainder: " + refinements.size() + " refinements.");
remainder = null;
for (FeatureTerm r : refinements) {
if (DEBUG>=1) {
if (oldRemainder.subsumes(r)) {
System.err.println("A generalization refinement is more specific than the orignal term!!!!!");
System.err.println("Original:");
System.err.println(oldRemainder.toStringNOOS(dm));
System.err.println("Generalization:");
System.err.println(r.toStringNOOS(dm));
}
}
// If the refinement subsumes 'refinement', then their unification will never be 'f':
if (r.subsumes(refinement)) continue;
if (DEBUG>=3) System.out.println("remainder: starting unification...");
List<FeatureTerm> unifications = FTUnification.unification(refinement, r, dm);
if (unifications==null) {
if (DEBUG>=3) System.out.println("remainder: 0 unifications");
continue;
}
if (DEBUG>=3) System.out.println("remainder: " + unifications.size() + " unifications");
for (FeatureTerm u : unifications) {
if (u.equivalents(f)) {
remainder = r;
break;
}
}
if (remainder != null) break;
}
} while (remainder != null);
// System.out.println("f: ");
// System.out.println(f.toStringNOOS(dm));
// System.out.println("refinement: ");
// System.out.println(refinement.toStringNOOS(dm));
// System.out.println("Remainder: ");
// System.out.println(oldRemainder.toStringNOOS(dm));
return oldRemainder;
}
/*
* This method computes all the possible remainders by using the "unification" method. It returns the correct result, but it's very, very slow.
*/
public static List<FeatureTerm> allRemaindersUnification(FeatureTerm f, FeatureTerm refinement, FTKBase dm, Ontology o) throws FeatureTermException {
List<FeatureTerm> stack = new LinkedList<FeatureTerm>();
List<FeatureTerm> remainders = new LinkedList<FeatureTerm>();
stack.add(f);
do {
FeatureTerm c = stack.remove(0);
if (DEBUG>=2) System.out.println("allRemaindersUnification: cycle starts (stack,rem " + stack.size() + "," + remainders.size() + ")");
if (DEBUG>=3) {
System.out.println("current: ");
System.out.println(c.toStringNOOS(dm));
}
// check for: 1) unification recovers f, 2) not subsumed by any remainder already computed
boolean found = false;
for(FeatureTerm remainder:remainders) {
if (remainder.subsumes(c)) {
found = true;
if (DEBUG>=2) System.out.println("allRemaindersUnification: subsumed by a previous remainder...");
break;
}
}
if (found) continue;
if (DEBUG>=3) System.out.println("allRemaindersUnification: starting unification...");
List<FeatureTerm> unifications = FTUnification.unification(refinement, c, dm);
if (unifications==null) {
if (DEBUG>=3) System.out.println("remainder: 0 unifications");
continue;
}
if (DEBUG>=3) System.out.println("remainder: " + unifications.size() + " unifications");
found = false;
for (FeatureTerm u : unifications) {
if (u.equivalents(f)) {
// add to remainders:
if (DEBUG>=3) {
System.out.println("New remainder: ");
System.out.println(c.toStringNOOS(dm));
}
List<FeatureTerm> toDelete = new LinkedList<FeatureTerm>();
for(FeatureTerm remainder:remainders) {
if (c.subsumes(remainder)) toDelete.add(remainder);
}
remainders.removeAll(toDelete);
toDelete.clear();
for(FeatureTerm t:stack) {
if (c.subsumes(t)) toDelete.add(t);
}
stack.removeAll(toDelete);
remainders.add(c);
found = true;
break;
}
}
if (found) {
List<FeatureTerm> refinements = FTRefinement.getGeneralizationsAggressive(c, dm, o);
stack.addAll(0,refinements);
if (DEBUG>=2) System.out.println("allRemaindersUnification: " + refinements.size() + " refinements.");
}
} while (!stack.isEmpty());
return remainders;
}
    // 'refinement' is a generalization of 'f':
public static FeatureTerm remainderFaster(FeatureTerm f, FeatureTerm refinement, FTKBase dm, Ontology o) throws FeatureTermException {
FeatureTerm oldRemainder = null;
FeatureTerm remainder = f;
// If any of these terms are candidate unifications, then the property cannot recover the original term:
List<FeatureTerm> originalGeneralizations = FTRefinement.getGeneralizationsAggressive(f, dm, o);
// Only those generalizations of 'f', which are subsumed by 'refinement'
List<FeatureTerm> originalGeneralizationsFiltered = new LinkedList<FeatureTerm>();
for(FeatureTerm g:originalGeneralizations) {
if (refinement.subsumes(g)) originalGeneralizationsFiltered.add(g);
}
do {
oldRemainder = remainder;
if (DEBUG>=3) {
System.out.println("remainder: cycle starts");
System.out.println("refinement: ");
System.out.println(remainder.toStringNOOS(dm));
}
List<FeatureTerm> refinements = FTRefinement.getGeneralizationsAggressive(remainder, dm, o);
if (DEBUG>=3)
System.out.println("remainder: " + refinements.size() + " refinements.");
remainder = null;
for (FeatureTerm r : refinements) {
boolean canRecover = true;
// Sanity test:
/* if (!(refinement.subsumes(f) && r.subsumes(f))) {
System.err.println("remainderFast: sanity check failed!");
System.exit(1);
}
*/
canRecover = true;
for(FeatureTerm candidate:originalGeneralizationsFiltered) {
if (refinement.subsumes(candidate) && r.subsumes(candidate)) {
canRecover = false;
}
}
if (canRecover) {
remainder = r;
break;
}
}
} while (remainder != null);
// System.out.println("f: ");
// System.out.println(f.toStringNOOS(dm));
// System.out.println("refinement: ");
// System.out.println(refinement.toStringNOOS(dm));
// System.out.println("Remainder: ");
// System.out.println(oldRemainder.toStringNOOS(dm));
return oldRemainder;
}
    public static FeatureTerm remainderSmart(FeatureTerm f, FeatureTerm refinement, FTKBase dm, Ontology o) throws FeatureTermException {
        if (DEBUG>=2) System.out.println("remainderSmart started...");
        FeatureTerm result = remainderFaster(f,refinement,dm,o);
        List<FeatureTerm> l = FTUnification.unification(result, refinement, dm);
        if (l!=null) {
            for(FeatureTerm tmp:l) {
                if (f.equivalents(tmp)) {
                    if (DEBUG>=2) System.out.println("remainderSmart: fast remainder succeeded!");
                    return result;
                }
            }
        }
//        if (DEBUG>=2)
        System.out.println("remainderSmart: fast remainder failed, starting slow remainder...");
        result = remainderUnification(f, refinement, dm, o);
        return result;
    }
public static List<FeatureTerm> disintegrateFast(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
List<FeatureTerm> properties = null;
Pair<FeatureTerm, FeatureTerm> property_rest;
// First check cache:
properties = propertiesFastTable.get(f);
if (properties!=null) return properties;
properties = new LinkedList<FeatureTerm>();
FeatureTerm unnamed = f.clone(dm, o);
for(FeatureTerm f2:FTRefinement.variables(unnamed)) {
if (!dm.contains(f2)) f2.setName(null);
}
do {
property_rest = extractPropertyFast(unnamed, dm, o);
if (property_rest != null) {
properties.add(property_rest.m_a);
unnamed = property_rest.m_b;
// System.out.println(property_rest.m_a.toStringNOOS(dm));
// System.out.println(f.toStringNOOS(dm));
}
} while (property_rest != null);
// System.out.println(unnamed.toStringNOOS(dm));
propertiesFastTable.put(f,properties);
return properties;
}
/*
* This method takes a feature term and generates a property and generalizes the term to remove that property from it
* The unification of the property and the generalized term should result exactly in the original term
*/
public static Pair<FeatureTerm, FeatureTerm> extractPropertyFast(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
HashMap<FeatureTerm, AnnotatedPath> vp = FTRefinement.variablesWithAnnotatedPaths(f);
HashMap<FeatureTerm, List<Pair<TermFeatureTerm, Symbol>>> vap = FTRefinement.variablesWithAllParents(f);
// Check for a sort property:
for (FeatureTerm X : vap.keySet()) {
if (!X.isConstant() && !dm.contains(X)) {
Sort most_general = o.getSort("any");
Sort to_generalize_to = X.getSort().getSuper();
if (to_generalize_to != null) {
for (Pair<TermFeatureTerm, Symbol> parent : vap.get(X)) {
if (parent != null) {
Sort s = parent.m_a.getSort().featureSort(parent.m_b);
if (most_general.subsumes(s)) {
most_general = s;
}
}
}
if (to_generalize_to.is_a(most_general)) {
boolean canGeneralize = true;
if (X instanceof TermFeatureTerm) {
for (Symbol fn : ((TermFeatureTerm) X).getFeatureNames()) {
if (!to_generalize_to.hasFeature(fn)) {
canGeneralize = false;
break;
}
}
}
if (canGeneralize) {
// Build Property:
FeatureTerm property = null;
{
AnnotatedPath ap = vp.get(X);
FeatureTerm lastNode = null, tmp;
Symbol lastFeature = null;
// Reconstruct the path:
for (Pair<FeatureTerm, Symbol> n_f : ap.features) {
if (lastNode == null) {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
property = lastNode = s.createFeatureTerm();
} else {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, tmp);
lastNode = tmp;
}
lastFeature = n_f.m_b;
}
if (lastNode == null) {
property = lastNode = X.getSort().createFeatureTerm();
} else {
tmp = X.getSort().createFeatureTerm();
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, tmp);
}
}
// Generalize:
FeatureTerm generalization = FTRefinement.sortSubstitution(f, X, to_generalize_to, dm);
return new Pair<FeatureTerm, FeatureTerm>(property, generalization);
}
}
}
}
}
// Otherwise check for a constant property:
{
for (FeatureTerm node : vap.keySet()) {
if (node.isConstant() || dm.contains(node)) {
List<Pair<TermFeatureTerm, Symbol>> parents = vap.get(node);
for (Pair<TermFeatureTerm, Symbol> p_f : parents) {
FeatureTerm container = p_f.m_a.featureValue(p_f.m_b);
FeatureTerm property = null;
{
AnnotatedPath ap = vp.get(node);
FeatureTerm lastNode = null, tmp;
Symbol lastFeature = null;
// Reconstruct the path:
for (Pair<FeatureTerm, Symbol> n_f : ap.features) {
if (lastNode == null) {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
property = lastNode = s.createFeatureTerm();
} else {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, tmp);
lastNode = tmp;
}
lastFeature = n_f.m_b;
}
if (lastNode == null) {
property = lastNode = node;
} else {
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, node);
}
}
if (container.equals(node)) {
HashMap<FeatureTerm, FeatureTerm> correspondences = new HashMap<FeatureTerm, FeatureTerm>();
FeatureTerm clone = f.clone(dm, correspondences);
((TermFeatureTerm) correspondences.get(p_f.m_a)).defineFeatureValue(p_f.m_b, node.getSort().createFeatureTerm());
/*
if (clone.equivalents(f)) {
System.err.println("ERROR 2!!!");
System.err.println(f.toStringNOOS(dm));
System.err.println(clone.toStringNOOS(dm));
System.err.println(property.toStringNOOS(dm));
System.exit(1);
}
*/
return new Pair<FeatureTerm, FeatureTerm>(property, clone);
} else {
int l = ((SetFeatureTerm) container).getSetValues().size();
for (int i = 0; i < l; i++) {
FeatureTerm ft2 = ((SetFeatureTerm) container).getSetValues().get(i);
if (ft2.equals(node)) {
HashMap<FeatureTerm, FeatureTerm> correspondences = new HashMap<FeatureTerm, FeatureTerm>();
FeatureTerm clone = f.clone(dm, correspondences);
((SetFeatureTerm) correspondences.get(container)).substituteSetValue(i, ft2.getSort().createFeatureTerm());
return new Pair<FeatureTerm, FeatureTerm>(property, clone);
}
} // for
}
}
}
}
}
// Otherwise check for a set property:
{
HashMap<SetFeatureTerm, Set<Pair<TermFeatureTerm, Symbol>>> sp = FTRefinement.setsWithAllParents(f);
for (SetFeatureTerm S : sp.keySet()) {
Sort most_general = o.getSort("any");
for (Pair<TermFeatureTerm, Symbol> parent : sp.get(S)) {
Sort s = parent.m_a.getSort().featureSort(parent.m_b);
if (most_general.subsumes(s)) {
most_general = s;
}
}
for (FeatureTerm X : S.getSetValues()) {
if (X.getSort() == most_general && X.isLeaf()) {
HashMap<FeatureTerm, FeatureTerm> correspondences = new HashMap<FeatureTerm, FeatureTerm>();
FeatureTerm clone = f.clone(dm, correspondences);
if (S.getSetValues().size() == 2) {
SetFeatureTerm setClone = (SetFeatureTerm) correspondences.get(S);
setClone.removeSetValue(correspondences.get(X));
FeatureTerm toRemoveClone = setClone.getSetValues().get(0);
clone.substitute(correspondences.get(S), toRemoveClone);
} else {
FeatureTerm Xclone = correspondences.get(X);
if (Xclone != null) {
((SetFeatureTerm) correspondences.get(S)).removeSetValue(Xclone);
} else {
// X is in the domain model
((SetFeatureTerm) correspondences.get(S)).removeSetValue(X);
}
}
FeatureTerm property = null;
{
AnnotatedPath ap = vp.get(X);
FeatureTerm lastNode = null, tmp;
Symbol lastFeature = null;
// Reconstruct the path:
for (Pair<FeatureTerm, Symbol> n_f : ap.features) {
if (lastNode == null) {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
property = lastNode = s.createFeatureTerm();
} else {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, tmp);
lastNode = tmp;
}
lastFeature = n_f.m_b;
}
tmp = new SetFeatureTerm();
for (int i = 0; i < S.getSetValues().size(); i++) {
((SetFeatureTerm) tmp).addSetValue(most_general.createFeatureTerm());
}
if (lastNode == null) {
property = lastNode = tmp;
} else {
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, tmp);
}
}
return new Pair<FeatureTerm, FeatureTerm>(property, clone);
}
}
}
}
// Otherwise check for a feature property:
for (FeatureTerm X : vap.keySet()) {
if (X instanceof TermFeatureTerm) {
TermFeatureTerm TX = (TermFeatureTerm) X;
for (Symbol fname : TX.getFeatureNames()) {
FeatureTerm fvalue = TX.featureValue(fname);
if (fvalue.getSort() == X.getSort().featureSort(fname) && fvalue.isLeaf() && !fvalue.isConstant() && !dm.contains(fvalue)) {
FeatureTerm property = null;
HashMap<FeatureTerm, FeatureTerm> correspondences = new HashMap<FeatureTerm, FeatureTerm>();
FeatureTerm clone = f.clone(dm, correspondences);
TermFeatureTerm f1 = (TermFeatureTerm) correspondences.get(X);
f1.removeFeatureValue(fname);
{
AnnotatedPath ap = vp.get(fvalue);
FeatureTerm lastNode = null, tmp;
Symbol lastFeature = null;
// Reconstruct the path:
for (Pair<FeatureTerm, Symbol> n_f : ap.features) {
if (lastNode == null) {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
property = lastNode = s.createFeatureTerm();
} else {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, tmp);
lastNode = tmp;
}
lastFeature = n_f.m_b;
}
if (lastNode == null) {
property = lastNode = fvalue.getSort().createFeatureTerm();
} else {
((TermFeatureTerm) lastNode).defineFeatureValue(lastFeature, fvalue.getSort().createFeatureTerm());
}
}
return new Pair<FeatureTerm, FeatureTerm>(property, clone);
}
}
}
}
// Otherwise check for a variable equality property:
{
for (FeatureTerm X : vap.keySet()) {
List<Pair<TermFeatureTerm, Symbol>> parents = vap.get(X);
/*
System.out.println("Parents of X:");
for(Pair<TermFeatureTerm,Symbol> parent:parents) {
if (parent==null) {
System.out.println("null");
} else {
System.out.println(parent.m_a.getSort().get() + "." + parent.m_b.get());
}
}
*/
if (parents.size() >= 2) {
FeatureTerm property = null;
Pair<TermFeatureTerm, Symbol> parent1 = null;
Pair<TermFeatureTerm, Symbol> parent2 = null;
boolean first = true;
// Get 2 parents of the set:
for (Pair<TermFeatureTerm, Symbol> parent : parents) {
if (first) {
parent1 = parent;
first = false;
} else {
parent2 = parent;
break;
}
}
if (parent2 == null) {
parent2 = parent1;
parent1 = null;
}
/*
if (parent1==null) {
System.out.println("parent1: null");
} else {
System.out.println("parent1: " + parent1.m_a.getSort().get() + "." + parent1.m_b.get());
}
if (parent2==null) {
System.out.println("parent2: null");
} else {
System.out.println("parent2: " + parent2.m_a.getSort().get() + "." + parent2.m_b.get());
}
*/
HashMap<FeatureTerm, FeatureTerm> correspondences = new HashMap<FeatureTerm, FeatureTerm>();
FeatureTerm clone = f.clone(dm, correspondences);
if (parent1 == null) {
FeatureTerm ft2Clone = X.getSort().createFeatureTerm();
((TermFeatureTerm) (correspondences.get(parent2.m_a))).defineFeatureValue(parent2.m_b, ft2Clone);
} else {
FeatureTerm ft2Clone = X.getSort().createFeatureTerm();
((TermFeatureTerm) (correspondences.get(parent1.m_a))).defineFeatureValue(parent1.m_b, ft2Clone);
}
// Create property:
{
AnnotatedPath ap1 = (parent1 == null ? new AnnotatedPath() : vp.get(parent1.m_a));
AnnotatedPath ap2 = vp.get(parent2.m_a);
FeatureTerm lastNode1 = null, lastNode2 = null, tmp, trail = null;
Symbol lastFeature1 = null, lastFeature2 = null;
// Reconstruct the first path:
for (Pair<FeatureTerm, Symbol> n_f : ap1.features) {
if (lastNode1 == null) {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
property = lastNode1 = s.createFeatureTerm();
} else {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode1).defineFeatureValue(lastFeature1, tmp);
lastNode1 = tmp;
}
lastFeature1 = n_f.m_b;
}
// Reconstruct the second path:
trail = property;
for (Pair<FeatureTerm, Symbol> n_f : ap2.features) {
if (lastNode2 == null) {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
if (trail == null) {
property = lastNode2 = s.createFeatureTerm();
} else {
if (!s.inSort(trail)) {
trail.setSort(s);
}
lastNode2 = trail;
trail = trail.featureValue(n_f.m_b);
}
} else {
Sort s = n_f.m_a.getSort();
while (s.getSuper().hasFeature(n_f.m_b)) {
s = s.getSuper();
}
if (trail == null) {
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode2).defineFeatureValue(lastFeature2, tmp);
lastNode2 = tmp;
} else {
if (!s.inSort(trail)) {
trail.setSort(s);
}
lastNode2 = trail;
trail = trail.featureValue(n_f.m_b);
}
}
lastFeature2 = n_f.m_b;
}
// Add the parents and the common node
{
if (lastNode1 == null) {
if (parent1 == null) {
// the variable equality refers to the root
} else {
Sort s = parent1.m_a.getSort();
while (s.getSuper().hasFeature(parent1.m_b)) {
s = s.getSuper();
}
if (property == null) {
// both paths refer to the root
lastNode1 = property = s.createFeatureTerm();
} else {
if (!s.inSort(property)) {
property.setSort(s);
}
lastNode1 = property;
}
}
} else {
Sort s = parent1.m_a.getSort();
while (s.getSuper().hasFeature(parent1.m_b)) {
s = s.getSuper();
}
tmp = lastNode1.featureValue(lastFeature1);
if (tmp == null) {
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode1).defineFeatureValue(lastFeature1, tmp);
lastNode1 = tmp;
} else {
if (!s.inSort(tmp)) {
tmp.setSort(s);
}
lastNode1 = tmp;
}
}
if (lastNode2 == null) {
Sort s = parent2.m_a.getSort();
while (s.getSuper().hasFeature(parent2.m_b)) {
s = s.getSuper();
}
if (property == null) {
// both paths refer to the root
lastNode2 = property = s.createFeatureTerm();
} else {
if (!s.inSort(property)) {
property.setSort(s);
}
lastNode2 = property;
}
} else {
Sort s = parent2.m_a.getSort();
while (s.getSuper().hasFeature(parent2.m_b)) {
s = s.getSuper();
}
tmp = lastNode2.featureValue(lastFeature2);
if (tmp == null) {
tmp = s.createFeatureTerm();
((TermFeatureTerm) lastNode2).defineFeatureValue(lastFeature2, tmp);
lastNode2 = tmp;
} else {
if (!s.inSort(tmp)) {
tmp.setSort(s);
}
lastNode2 = tmp;
}
}
}
// System.out.println(ap1 + " -> " + parent1.m_b);
// System.out.println(ap2 + " -> " + parent2.m_b);
// System.out.println(property.toStringNOOS(dm));
// Add the common term:
{
Sort s = X.getSort();
FeatureTerm common = null;
if (parent1 == null) {
// parent1 is the root
((TermFeatureTerm) lastNode2).defineFeatureValue(parent2.m_b, property);
} else {
common = lastNode1.featureValue(parent1.m_b);
if (common == null) {
common = lastNode2.featureValue(parent2.m_b);
if (common == null) {
common = s.createFeatureTerm();
}
}
tmp = lastNode1.featureValue(parent1.m_b);
if (tmp == null) {
((TermFeatureTerm) lastNode1).defineFeatureValue(parent1.m_b, common);
} else {
common = tmp;
if (!s.inSort(common)) {
tmp.setSort(s);
}
}
tmp = lastNode2.featureValue(parent2.m_b);
if (tmp == null) {
((TermFeatureTerm) lastNode2).defineFeatureValue(parent2.m_b, common);
} else {
if (!s.inSort(common)) {
tmp.setSort(s);
}
}
}
}
// System.out.println(property.toStringNOOS(dm));
}
return new Pair<FeatureTerm, FeatureTerm>(property, clone);
}
}
}
return null;
}
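    /*
     * Illustrative check (added; not part of the original API). It exercises the invariant
     * documented above extractPropertyFast: unifying the extracted property with the
     * generalized remainder should recover a term equivalent to the original one.
     */
    public static boolean exampleCheckPropertyInvariant(FeatureTerm f, FTKBase dm, Ontology o) throws FeatureTermException {
        Pair<FeatureTerm, FeatureTerm> property_rest = extractPropertyFast(f, dm, o);
        if (property_rest == null) return true; // nothing left to extract
        List<FeatureTerm> unifications = FTUnification.unification(property_rest.m_a, property_rest.m_b, dm);
        if (unifications == null) return false;
        for (FeatureTerm u : unifications) {
            if (u.equivalents(f)) return true;
        }
        return false;
    }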
}
|
package me.irieksts.red.tail.ws;
import me.irieksts.red.tail.ws.config.RedTailConfig;
import me.irieksts.red.tail.ws.health.PingHealthCheck;
import me.irieksts.red.tail.ws.resource.DataResource;
import com.yammer.dropwizard.Service;
import com.yammer.dropwizard.config.DropwizardResourceConfig;
import com.yammer.dropwizard.config.Environment;
/**
* @author Isaac Rieksts
*
*/
public class StartService extends Service<RedTailConfig> {
public static void main(String[] args) throws Exception {
new StartService().run(args);
}
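    // Illustrative note (added): as a standard Dropwizard service this is typically launched
    // with a YAML configuration file, e.g. "java -jar red-tail.jar server config.yml"; the
    // jar and configuration file names here are hypothetical.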
protected StartService() {
super("Red Tail");
}
@Override
protected void initialize(RedTailConfig conf, Environment env)
throws Exception {
env.addHealthCheck(new PingHealthCheck());
env.addResource(new DataResource(conf));
        env.disableJerseyFeature(DropwizardResourceConfig.FEATURE_DISABLE_WADL);
}
}
|
package net.karlmartens.ui.widget;
import java.text.NumberFormat;
import java.util.Arrays;
import java.util.BitSet;
import net.karlmartens.platform.text.LocalDateFormat;
import net.karlmartens.platform.util.ArraySupport;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.TypedListener;
import org.joda.time.LocalDate;
import org.joda.time.format.DateTimeFormat;
import de.kupzog.ktable.KTable;
import de.kupzog.ktable.KTableCellEditor;
import de.kupzog.ktable.KTableCellRenderer;
import de.kupzog.ktable.KTableCellResizeListener;
import de.kupzog.ktable.KTableDefaultModel;
import de.kupzog.ktable.KTableModel;
import de.kupzog.ktable.SWTX;
import de.kupzog.ktable.renderers.CheckableCellRenderer;
import de.kupzog.ktable.renderers.DefaultCellRenderer;
import de.kupzog.ktable.renderers.FixedCellRenderer;
import de.kupzog.ktable.renderers.TextCellRenderer;
public final class TimeSeriesTable extends Composite {
public enum ScrollDataMode {
FOCUS_CELL, SELECTED_ROWS
};
private final CellSelectionManager _cellSelectionManager;
private final TimeSeriesTableListener _listener;
private final Font _defaultFont;
private final TimeSeriesTableColumn _periodColumn;
private final KTableImpl _table;
private final SparklineScrollBar _hscroll;
private final int _defaultWidth;
private final int _rowHeight;
private boolean _showHeader = false;
private ScrollDataMode _scrollDataMode = ScrollDataMode.FOCUS_CELL;
private LocalDateFormat _dateFormat = new LocalDateFormat(DateTimeFormat.shortDate());
private NumberFormat _numberFormat = NumberFormat.getNumberInstance();
private int _columnCount = 0;
private int _itemCount = 0;
private TimeSeriesTableColumn[] _columns = {};
private TimeSeriesTableItem[] _items = {};
private LocalDate[] _periods = {};
private int[] _widths = {};
private boolean _requiresRedraw = true;
private boolean _inUpdate = false;
private int _lastPeriodColumnIndex = -1;
private int[] _lastRowSelection = new int[0];
private int _lastIndexOf = -1;
public TimeSeriesTable(Composite parent) {
super(parent, SWT.NONE);
setLayout(new FormLayout());
_defaultFont = new Font(getDisplay(), "Arial", 10, SWT.BOLD);
_listener = new TimeSeriesTableListener();
final GC gc = new GC(getShell());
gc.setFont(getFont());
_periodColumn = new TimeSeriesTableColumn(this);
_defaultWidth = gc.getCharWidth('W') * 8;
_rowHeight = gc.getFontMetrics().getHeight() + 10;
gc.dispose();
_table = new KTableImpl(this, SWT.FLAT | SWT.V_SCROLL | SWT.MULTI | SWTX.MARK_FOCUS_HEADERS);
_table.setBackground(getBackground());
_table.setForeground(getForeground());
_table.setModel(new TimeSeriesTableModel());
_hscroll = new SparklineScrollBar(this, SWT.BORDER);
_hscroll.setMinimum(0);
_hscroll.setMaximum(1);
_hscroll.setSelection(0);
_hscroll.setThumb(2);
_hscroll.setLabelFont(_defaultFont);
final FormData tableData = new FormData();
tableData.top = new FormAttachment(0, 100, 0);
tableData.left = new FormAttachment(_hscroll, 0, SWT.LEFT);
tableData.bottom = new FormAttachment(_hscroll, -5, SWT.TOP);
tableData.right = new FormAttachment(_hscroll, 0, SWT.RIGHT);
final FormData scrollData = new FormData();
scrollData.left = new FormAttachment(0, 100, 0);
scrollData.bottom = new FormAttachment(100, 100, 0);
scrollData.right = new FormAttachment(100, 100, 0);
scrollData.height = 40;
_table.setLayoutData(tableData);
_hscroll.setLayoutData(scrollData);
_cellSelectionManager = new CellSelectionManager(this);
final PassthoughEventListener passthroughListener = new PassthoughEventListener(this);
passthroughListener.addSource(_table);
new TimeSeriesColumnManager(this, _table);
hookControls();
}
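    /*
     * Illustrative usage sketch (added; not part of the original widget API). It only calls
     * public methods defined in this class; populating individual cells goes through
     * TimeSeriesTableItem and is omitted here.
     */
    static TimeSeriesTable exampleUsage(Composite parent, LocalDate[] periods) {
        final TimeSeriesTable table = new TimeSeriesTable(parent);
        table.setHeaderVisible(true);
        table.setPeriods(periods);                           // one data column per period
        table.setItemCount(10);                              // creates ten empty rows
        table.setScrollDataMode(ScrollDataMode.SELECTED_ROWS);
        table.setSelection(new int[] { 0 });                 // select the first row
        return table;
    }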
@Override
public void setBackground(Color color) {
super.setBackground(color);
_table.setBackground(color);
}
@Override
public void setForeground(Color color) {
super.setForeground(color);
_table.setForeground(color);
}
public int getColumnCount() {
checkWidget();
return _columnCount;
}
public int getPeriodCount() {
checkWidget();
return _periods.length;
}
public int getItemCount() {
checkWidget();
return _itemCount;
}
public int indexOf(TimeSeriesTableItem item) {
checkWidget();
checkNull(item);
if (_lastIndexOf >= 1 && _lastIndexOf < _itemCount - 1) {
if (_items[_lastIndexOf] == item)
return _lastIndexOf;
if (_items[_lastIndexOf + 1] == item)
return ++_lastIndexOf;
if (_items[_lastIndexOf - 1] == item)
return --_lastIndexOf;
}
if (_lastIndexOf < _itemCount / 2) {
for (int i = 0; i < _itemCount; i++) {
if (_items[i] == item) {
_lastIndexOf = i;
return i;
}
}
} else {
            for (int i = _itemCount - 1; i >= 0; i--) {
if (_items[i] == item) {
_lastIndexOf = i;
return i;
}
}
}
return -1;
}
public int indexOf(TimeSeriesTableColumn column) {
checkWidget();
checkNull(column);
if (column == _periodColumn) {
final Object o = _periodColumn.getData();
if (o == null)
return -1;
return ((Integer) o).intValue() + _columnCount;
}
for (int i = 0; i < _columnCount; i++) {
if (_columns[i] == column) {
return i;
}
}
return -1;
}
public TimeSeriesTableItem[] getItems() {
checkWidget();
final TimeSeriesTableItem[] items = new TimeSeriesTableItem[_itemCount];
System.arraycopy(_items, 0, items, 0, items.length);
return items;
}
public TimeSeriesTableItem getItem(int index) {
checkWidget();
checkRowIndex(index);
return _items[index];
}
public TimeSeriesTableItem getItem(Point point) {
checkWidget();
checkNull(point);
final Point dPoint = this.toDisplay(point);
final Point tPoint = _table.toControl(dPoint);
final Point cell = _table.getCellForCoordinates(tPoint.x, tPoint.y);
final int row = computeModelRow(cell.y);
if (row < 0)
return null;
return _items[row];
}
public int[] getSelectionIndices() {
checkWidget();
final BitSet selectedRows = new BitSet();
for (Point selection : _table.getCellSelection()) {
if (_showHeader && selection.y < _table.getModel().getFixedHeaderRowCount())
continue;
selectedRows.set(computeModelRow(selection.y));
}
return ArraySupport.toArray(selectedRows);
}
public TimeSeriesTableItem[] getSelection() {
checkWidget();
final int[] indices = getSelectionIndices();
final TimeSeriesTableItem[] selected = new TimeSeriesTableItem[indices.length];
for (int i = 0; i < indices.length; i++) {
selected[i] = _items[indices[i]];
}
return selected;
}
public TimeSeriesTableColumn getColumn(int index) {
checkWidget();
checkColumnIndex(index);
if (index < _columnCount)
return _columns[index];
final int periodIndex = index - _columnCount;
if (periodIndex == _lastPeriodColumnIndex)
return _periodColumn;
if (_lastPeriodColumnIndex != -1 && _lastPeriodColumnIndex < _widths.length)
_widths[_lastPeriodColumnIndex] = _periodColumn.getWidth();
try {
_inUpdate = true;
final LocalDate date = _periods[periodIndex];
_periodColumn.setText(date == null ? "" : _dateFormat.format(date));
_periodColumn.setWidth(_widths[periodIndex]);
_periodColumn.setData(periodIndex);
_lastPeriodColumnIndex = periodIndex;
} finally {
_inUpdate = false;
}
return _periodColumn;
}
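    /*
     * Note (added for documentation): for indices at or beyond _columnCount, getColumn above
     * reuses the single shared _periodColumn instance, re-labelling and re-sizing it for the
     * requested period, so the returned column object should not be cached across calls.
     */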
public Rectangle getVisibleDataCells() {
checkWidget();
final Rectangle r = doGetVisibleDataCells();
r.x -= _columnCount;
r.y = computeModelRow(r.y);
if (r.x < 0 || r.x >= _periods.length || r.y < 0 || r.y >= _itemCount)
return new Rectangle(0,0,0,0);
return r;
}
@Override
public boolean setFocus() {
return _table.forceFocus();
}
@Override
public boolean isFocusControl() {
if (_table.isFocusControl())
return true;
return super.isFocusControl();
}
public void setHeaderVisible(boolean show) {
checkWidget();
_showHeader = show;
_table.redraw();
}
public void setPeriods(LocalDate[] periods) {
checkWidget();
checkNull(periods);
final LocalDate[] newPeriods = new LocalDate[periods.length];
System.arraycopy(periods, 0, newPeriods, 0, newPeriods.length);
Arrays.sort(newPeriods);
        _periods = newPeriods; // keep the sorted copy (scrollColumnTo uses binarySearch on _periods)
final int len = Math.min(_widths.length, _periods.length);
final int[] newWidths = new int[_periods.length];
System.arraycopy(_widths, 0, newWidths, 0, len);
if (len < newWidths.length) {
Arrays.fill(newWidths, len, newWidths.length, _defaultWidth);
}
_widths = newWidths;
_hscroll.setMaximum(Math.max(1, _periods.length - 1));
_table.redraw();
}
public void setDateFormat(LocalDateFormat format) {
checkWidget();
checkNull(format);
_dateFormat = format;
_table.redraw();
}
public void setNumberFormat(NumberFormat format) {
checkWidget();
checkNull(format);
_numberFormat = format;
_table.redraw();
}
public void setScrollDataMode(ScrollDataMode mode) {
checkWidget();
checkNull(mode);
_scrollDataMode = mode;
doUpdateScrollData();
}
public void deselectAll() {
checkWidget();
_table.clearSelection();
notifyListeners(SWT.Selection, new Event());
}
public void setSelection(TimeSeriesTableItem[] items) {
checkWidget();
checkNull(items);
final int[] indices = new int[items.length];
int i = 0;
for (TimeSeriesTableItem item : items) {
final int index = indexOf(item);
if (index < 0)
continue;
indices[i++] = index;
}
final int[] result = new int[i];
System.arraycopy(indices, 0, result, 0, i);
setSelection(result);
}
public void setSelection(int[] indices) {
checkWidget();
checkNull(indices);
final int width = _columnCount + _periods.length;
final Point[] selections = new Point[indices.length * width];
for (int i = 0; i < indices.length; i++) {
for (int j = 0; j < width; j++) {
selections[i * width + j] = new Point(j, computeTableRow(indices[i]));
}
}
_table.clearSelection();
_table.setSelection(selections, false);
final Event e = new Event();
e.item = indices.length > 0 ? getItem(indices[0]) : null;
notifyListeners(SWT.Selection, e);
}
public void select(int[] indices) {
checkWidget();
checkNull(indices);
final BitSet selected = new BitSet();
for (int index : getSelectionIndices()) {
selected.set(index);
}
for (int index : indices) {
selected.set(index);
}
final int[] newSelection = new int[selected.cardinality()];
int index = 0;
for (int i = selected.nextSetBit(0); i >= 0; i = selected.nextSetBit(i + 1)) {
newSelection[index++] = i;
}
setSelection(newSelection);
}
public Point[] getCellSelections() {
checkWidget();
final Point[] pts = _table.getCellSelection();
final Point[] selection = new Point[pts.length];
for (int i = 0; i < selection.length; i++) {
final Point pt = pts[i];
selection[i] = new Point(pt.x, computeModelRow(pt.y));
}
return selection;
}
public CellSelectionManager getCellSelectionManager() {
return _cellSelectionManager;
}
public void setCellSelections(Point[] selected) {
checkWidget();
checkNull(selected);
final BitSet rSelected = new BitSet();
final Point[] tSelected = new Point[selected.length];
for (int i = 0; i < tSelected.length; i++) {
final Point pt = selected[i];
tSelected[i] = new Point(pt.x, computeTableRow(pt.y));
rSelected.set(selected[i].y);
}
_table.clearSelection();
_table.setSelection(tSelected, false);
final int[] selectedRows = ArraySupport.toArray(rSelected);
final int[] update = ArraySupport.minus(selectedRows, _lastRowSelection);
if (update.length > 0) {
_lastRowSelection = selectedRows;
doUpdateRows(update);
final Event e = new Event();
e.item = selected.length > 0 ? getItem(selected[0].y) : null;
notifyListeners(SWT.Selection, e);
}
doUpdateScrollSelection();
doUpdateScrollHighlights();
doUpdateScrollData();
}
public void showSelection() {
checkWidget();
final TimeSeriesTableItem[] items = getSelection();
if (items.length == 0)
return;
showItem(items[0]);
}
public void showItem(TimeSeriesTableItem item) {
checkWidget();
checkNull(item);
final int index = indexOf(item);
if (index < 0)
return;
final int tIndex = computeTableRow(index);
final Rectangle r = _table.getVisibleCells();
if (r.y <= tIndex && (r.y + r.height) > tIndex)
return;
if (tIndex < r.y) {
_table.scroll(r.x, tIndex);
return;
}
_table.scroll(r.x, tIndex - r.height + 2);
}
public void showColumn(int index) {
checkWidget();
checkColumnIndex(index);
if (index < _columnCount)
return;
final Rectangle r = doGetVisibleDataCells();
if (r.x <= index && (r.x + r.width) > index)
return;
if (index < r.x) {
scrollColumnTo(index);
return;
}
scrollColumnTo(index - r.width + 1);
}
public void scrollColumnTo(LocalDate date) {
checkWidget();
checkNull(date);
int index = Arrays.binarySearch(_periods, date);
if (index < 0) {
index = -(index + 1);
}
scrollColumnTo(index + _columnCount);
}
private void scrollColumnTo(int index) {
checkWidget();
checkColumnIndex(index);
if (index < _columnCount)
return;
final Rectangle r = doGetVisibleDataCells();
final int row = Math.max(0, Math.min(r.y, _itemCount - _table.getVisibleRowCount() + 1));
_table.scroll(index, row);
_hscroll.setSelection(index - _columnCount);
}
public void setItemCount(int count) {
checkWidget();
final int c = Math.max(0, count);
if (c == _itemCount)
return;
if (c > _itemCount) {
for (int i = _itemCount; i < c; i++) {
new TimeSeriesTableItem(this, i);
}
return;
}
for (int i = c; i < _itemCount; i++) {
final TimeSeriesTableItem item = _items[i];
if (item != null && !item.isDisposed())
item.release();
_items[i] = null;
}
final int length = Math.max(4, (c + 3) / 4 * 4);
final TimeSeriesTableItem[] newItems = new TimeSeriesTableItem[length];
System.arraycopy(_items, 0, newItems, 0, c);
_items = newItems;
_itemCount = c;
_table.redraw();
}
public void remove(int start, int end) {
checkWidget();
if (start < 0 || start > end || end >= _itemCount)
SWT.error(SWT.ERROR_INVALID_RANGE);
        for (int i = end; i >= start; i--) {
doRemove(i);
}
_table.redraw();
}
public void remove(int[] indices) {
checkWidget();
checkNull(indices);
if (indices.length == 0)
return;
final int[] idxs = new int[indices.length];
System.arraycopy(indices, 0, idxs, 0, idxs.length);
Arrays.sort(idxs);
        for (int i = idxs.length - 1; i >= 0; i--) {
doRemove(idxs[i]);
}
_table.redraw();
}
public void removeAll() {
checkWidget();
for (int i = 0; i < _itemCount; i++) {
_items[i].release();
_items[i] = null;
}
_itemCount = 0;
_table.redraw();
}
public void clear(int index) {
checkWidget();
checkRowIndex(index);
_items[index].clear();
_table.redraw();
}
public void clearAll() {
checkWidget();
for (int i = 0; i < _itemCount; i++) {
_items[i].clear();
}
_table.redraw();
}
public void moveColumn(int fromIndex, int toIndex) {
checkWidget();
checkColumnIndex(fromIndex);
checkColumnIndex(toIndex);
if (fromIndex == toIndex)
return;
if (!_columns[fromIndex].isMoveable() || !_columns[toIndex].isMoveable())
return;
final TimeSeriesTableColumn t = _columns[fromIndex];
_columns[fromIndex] = _columns[toIndex];
_columns[toIndex] = t;
for (int i = 0; i < _itemCount; i++) {
_items[i].swapColumns(fromIndex, toIndex);
}
_columns[fromIndex].notifyListeners(SWT.Move, new Event());
_columns[toIndex].notifyListeners(SWT.Move, new Event());
_table.redraw();
}
public void addSelectionListener(SelectionListener listener) {
checkWidget();
checkNull(listener);
final TypedListener tListener = new TypedListener(listener);
addListener(SWT.Selection, tListener);
addListener(SWT.DefaultSelection, tListener);
}
public void removeSelectionListener(SelectionListener listener) {
checkWidget();
checkNull(listener);
final TypedListener tListener = new TypedListener(listener);
removeListener(SWT.Selection, tListener);
removeListener(SWT.DefaultSelection, tListener);
}
@Override
public void redraw() {
checkWidget();
if (_inUpdate)
return;
_requiresRedraw = true;
super.redraw();
}
void createItem(TimeSeriesTableColumn item, int index) {
checkWidget();
if (index < 0 || index > _columnCount)
SWT.error(SWT.ERROR_INVALID_RANGE);
if (_columns.length == _columnCount) {
final TimeSeriesTableColumn[] newColumns = new TimeSeriesTableColumn[_columns.length + 4];
System.arraycopy(_columns, 0, newColumns, 0, _columns.length);
_columns = newColumns;
}
System.arraycopy(_columns, index, _columns, index + 1, _columnCount++ - index);
_columns[index] = item;
}
void createItem(TimeSeriesTableItem item, int index) {
checkWidget();
if (index < 0 || index > _itemCount)
SWT.error(SWT.ERROR_INVALID_RANGE);
if (_items.length == _itemCount) {
final int length = Math.max(4, _items.length * 3 / 2);
final TimeSeriesTableItem[] newItems = new TimeSeriesTableItem[length];
System.arraycopy(_items, 0, newItems, 0, _items.length);
_items = newItems;
}
System.arraycopy(_items, index, _items, index + 1, _itemCount++ - index);
_items[index] = item;
}
NumberFormat getNumberFormat() {
checkWidget();
return _numberFormat;
}
Rectangle getBounds(TimeSeriesTableItem item, int index) {
checkWidget();
checkNull(item);
checkColumnIndex(index);
final int mRow = indexOf(item);
final int tRow = computeTableRow(mRow);
final Rectangle r = _table.getCellRect(index, tRow);
final Point dPoint = _table.toDisplay(r.x, r.y);
final Point pt = this.toControl(dPoint);
return new Rectangle(pt.x, pt.y, r.width, r.height);
}
Rectangle getBounds(TimeSeriesTableItem item) {
checkWidget();
checkNull(item);
final int mRow = indexOf(item);
final int tRow = computeTableRow(mRow);
Rectangle bounds = null;
for (int i = 0; i < _columnCount; i++) {
final Rectangle r = _table.getCellRect(i, tRow);
if (bounds == null) {
final Point dPoint = _table.toDisplay(r.x, r.y);
final Point pt = this.toControl(dPoint);
bounds = new Rectangle(pt.x, pt.y, 0, 0);
}
bounds.width += r.width;
bounds.height = Math.max(bounds.height, r.height);
}
return bounds;
}
Rectangle getImageBounds(TimeSeriesTableItem item, int index) {
checkWidget();
checkNull(item);
checkColumnIndex(index);
final Rectangle r = getBounds(item, index);
r.width = 0;
r.height = 0;
return r;
}
Composite getTableComposite() {
checkWidget();
return _table;
}
int getVisibleRowCount() {
checkWidget();
final KTableModel model = _table.getModel();
return _table.getVisibleRowCount() - model.getFixedHeaderRowCount() - model.getFixedSelectableRowCount();
}
int getVisibleColumnCount() {
checkWidget();
return doGetVisibleDataCells().width;
}
// TODO consolidate with getVisibleRowCount and getVisibleColumnCount
private Rectangle doGetVisibleDataCells() {
final Rectangle r = _table.getVisibleCells();
if (r.width > 0 && (_showHeader || r.y < _itemCount)) {
final int y = _showHeader ? 0 : r.y;
if (!_table.isCellFullyVisible(r.x + r.width - 1, y)) {
                r.width--;
}
}
if (r.height > 0 && (_columnCount > 0 || r.x < _periods.length)) {
final int x = (_columnCount > 0) ? 0 : r.x;
if (!_table.isCellFullyVisible(x, r.y + r.height - 1)) {
                r.height--;
}
}
final int correction = _columnCount + _periods.length - r.x - r.width;
if (correction < 0) {
// this is required because KTable reports more visible columns
// when scrolled all the way to the right.
r.width += correction;
}
return r;
}
private int computeTableRow(int row) {
if (_showHeader)
return row + _table.getModel().getFixedHeaderRowCount();
return row;
}
private int computeModelRow(int row) {
if (_showHeader)
return row - _table.getModel().getFixedHeaderRowCount();
return row;
}
private void hookControls() {
_table.addCellResizeListener(_listener);
_table.addPaintListener(_listener);
_hscroll.addSelectionListener(_listener);
addPaintListener(_listener);
addDisposeListener(_listener);
}
private void releaseControls() {
_table.removeCellResizeListener(_listener);
_table.removePaintListener(_listener);
_hscroll.removeSelectionListener(_listener);
removePaintListener(_listener);
removeDisposeListener(_listener);
}
private void checkNull(Object o) {
if (o == null)
SWT.error(SWT.ERROR_NULL_ARGUMENT);
}
private void checkColumnIndex(int index) {
if (index < 0 || index >= (_columnCount + _periods.length))
SWT.error(SWT.ERROR_INVALID_RANGE);
}
private void checkRowIndex(int index) {
if (index < 0 || index >= _itemCount)
SWT.error(SWT.ERROR_INVALID_RANGE);
}
private void doUpdateScrollSelection() {
final Point focus = _cellSelectionManager.getFocusCell();
if (focus == null || focus.x < _columnCount)
return;
final Rectangle r = doGetVisibleDataCells();
if (focus.x < r.x) {
_hscroll.setSelection(focus.x - _columnCount);
return;
}
if (focus.x < (r.x + r.width))
return;
final int delta = focus.x - r.x - r.width + 1;
_hscroll.setSelection(r.x + delta - _columnCount);
}
private void doUpdateScrollHighlights() {
final BitSet selectedColumns = new BitSet();
for (Point p : _table.getCellSelection()) {
if (p.x < _columnCount)
continue;
if (_showHeader && p.y == 0)
continue;
selectedColumns.set(p.x - _columnCount);
}
final int[] indices = ArraySupport.toArray(selectedColumns);
_hscroll.setHighlights(indices);
}
private void doUpdateScrollData() {
final int[] indices;
switch (_scrollDataMode) {
case FOCUS_CELL:
final Point focus = _cellSelectionManager.getFocusCell();
if (focus != null && focus.y >= 0 && focus.y < _itemCount) {
indices = new int[] { focus.y };
} else {
indices = new int[] {};
}
break;
case SELECTED_ROWS:
indices = getSelectionIndices();
break;
default:
indices = new int[] {};
}
final double[] data = new double[_periods.length];
Arrays.fill(data, 0.0);
for (int index : indices) {
for (int j = 0; j < data.length; j++) {
data[j] += _items[index].getValue(j);
}
}
_hscroll.setDataPoints(data);
}
private void doUpdateRows(int[] indices) {
if (indices.length <= 0)
return;
final int width = doGetVisibleDataCells().width + _columnCount + 1;
Arrays.sort(indices);
int previous = computeTableRow(indices[0]);
int height = 1;
for (int i = 1; i < indices.length; i++) {
final int index = computeTableRow(indices[i]);
final int delta = index - previous;
if (delta <= 1) {
height += delta;
previous = index;
continue;
}
_table.redraw(0, previous - height + 1, width, height);
previous = index;
height = 1;
}
_table.redraw(0, previous - height + 1, width, height);
}
private void doRemove(int index) {
_items[index].release();
System.arraycopy(_items, index + 1, _items, index, --_itemCount - index);
_items[_itemCount] = null;
}
private final class TimeSeriesTableModel extends KTableDefaultModel {
@Override
public int getFixedHeaderColumnCount() {
return 0;
}
@Override
public int getFixedHeaderRowCount() {
return _showHeader ? 1 : 0;
}
@Override
public int getFixedSelectableColumnCount() {
return _columnCount;
}
@Override
public int getFixedSelectableRowCount() {
return 0;
}
@Override
public int getRowHeightMinimum() {
return 0;
}
@Override
public boolean isColumnResizable(int col) {
return getColumn(col).isVisible();
}
@Override
public boolean isRowResizable(int row) {
return false;
}
@Override
public KTableCellEditor doGetCellEditor(int col, int row) {
// Not used
return null;
}
private final FixedCellRenderer _headerRenderer = new FixedCellRenderer(SWT.BOLD | DefaultCellRenderer.INDICATION_FOCUS_ROW);
private final TextCellRenderer _renderer = new TextCellRenderer(DefaultCellRenderer.INDICATION_FOCUS);
private final CheckableCellRenderer _checkRenderer = new CheckableCellRenderer(DefaultCellRenderer.INDICATION_FOCUS);
@Override
public KTableCellRenderer doGetCellRenderer(int col, int row) {
if (_showHeader && row == 0) {
_headerRenderer.setDefaultBackground(getBackground());
_headerRenderer.setDefaultForeground(getForeground());
_headerRenderer.setFont(getFont());
return _headerRenderer;
}
final TimeSeriesTableColumn column = getColumn(col);
final DefaultCellRenderer renderer;
if ((SWT.CHECK & column.getStyle()) > 0) {
renderer = _checkRenderer;
renderer.setAlignment(SWTX.ALIGN_HORIZONTAL_CENTER | SWTX.ALIGN_VERTICAL_CENTER);
} else {
renderer = _renderer;
if (column == _periodColumn) {
renderer.setAlignment(SWTX.ALIGN_HORIZONTAL_RIGHT | SWTX.ALIGN_VERTICAL_CENTER);
} else {
renderer.setAlignment(SWTX.ALIGN_HORIZONTAL_LEFT | SWTX.ALIGN_VERTICAL_CENTER);
}
}
final int style = column.getStyle();
if ((style & SWT.LEFT) > 0) {
renderer.setAlignment(SWTX.ALIGN_HORIZONTAL_LEFT | SWTX.ALIGN_VERTICAL_CENTER);
} else if ((style & SWT.RIGHT) > 0) {
renderer.setAlignment(SWTX.ALIGN_HORIZONTAL_RIGHT | SWTX.ALIGN_VERTICAL_CENTER);
} else if ((style & SWT.CENTER) > 0) {
renderer.setAlignment(SWTX.ALIGN_HORIZONTAL_CENTER | SWTX.ALIGN_VERTICAL_CENTER);
}
final int modelRow = computeModelRow(row);
final TimeSeriesTableItem item = getItem(modelRow);
renderer.setDefaultBackground(item.getBackground(col));
renderer.setDefaultForeground(item.getForeground(col));
renderer.setFont(item.getFont(col));
return renderer;
}
@Override
public int doGetColumnCount() {
return getFixedColumnCount() + _columnCount + _periods.length;
}
@Override
public Object doGetContentAt(int col, int row) {
if (col < 0 || col >= (_columnCount + _periods.length))
return "";
final TimeSeriesTableColumn column = getColumn(col);
if (_showHeader && row == 0) {
return column.getText();
}
final TimeSeriesTableItem item = _items[computeModelRow(row)];
final String text = item.getText(col);
if ((SWT.CHECK & column.getStyle()) > 0)
return Boolean.valueOf(text);
if (text == null)
return "";
return text;
}
@Override
public int doGetRowCount() {
return getFixedRowCount() + _itemCount;
}
@Override
public void doSetContentAt(int col, int row, Object newValue) {
// Not used
}
@Override
public int getColumnWidth(int col) {
if (col < 0 || col >= (_columnCount + _periods.length))
return 0;
final TimeSeriesTableColumn column = getColumn(col);
if (!column.isVisible())
return 0;
return column.getWidth();
}
@Override
public void setColumnWidth(int col, int value) {
if (!isColumnResizable(col))
return;
final TimeSeriesTableColumn column = getColumn(col);
column.setWidth(value);
}
@Override
public int getInitialColumnWidth(int col) {
throw new UnsupportedOperationException();
}
@Override
public int getInitialRowHeight(int row) {
return _rowHeight;
}
}
private final class TimeSeriesTableListener implements KTableCellResizeListener, SelectionListener, PaintListener, DisposeListener {
@Override
public void widgetSelected(SelectionEvent e) {
if (e.getSource() != _hscroll)
return;
final int selection = _hscroll.getSelection();
if (selection < 0 || selection >= _periods.length)
return;
_hscroll.setLabel(_dateFormat.format(_periods[selection]));
scrollColumnTo(_hscroll.getSelection() + _columnCount);
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {
// Ignore event
}
@Override
public void paintControl(PaintEvent e) {
if (e.getSource() == TimeSeriesTable.this && _requiresRedraw) {
_requiresRedraw = false;
_table.redraw();
}
if (e.getSource() != _table)
return;
final Rectangle visible = doGetVisibleDataCells();
if (visible.width <= 0) {
_hscroll.setThumb(_hscroll.getMaximum() + 1);
_hscroll.setEnabled(false);
return;
}
_hscroll.setThumb(Math.max(1, visible.width));
_hscroll.setEnabled(true);
}
@Override
public void columnResized(int col, int newWidth) {
if (col < _columnCount) {
_columns[col].notifyListeners(SWT.Resize, new Event());
}
}
@Override
public void rowResized(int row, int newHeight) {
_items[computeModelRow(row)].notifyListeners(SWT.Resize, new Event());
}
@Override
public void widgetDisposed(DisposeEvent e) {
releaseControls();
_defaultFont.dispose();
}
}
final class KTableImpl extends KTable {
private boolean _ignoreMouseMove = false;
private KTableImpl(Composite parent, int style) {
super(parent, style);
}
void setIgnoreMouseMove(boolean b) {
_ignoreMouseMove = b;
}
@Override
protected void onMouseMove(MouseEvent e) {
if (_ignoreMouseMove)
return;
super.onMouseMove(e);
}
@Override
protected void onMouseDoubleClick(MouseEvent e) {
// Disable default double click event handling
}
@Override
protected void onMouseDown(MouseEvent e) {
// Disable default event handling
if (e.button == 1) {
setCapture(true);
m_Capture = true;
// Resize column?
int columnIndex = getColumnForResize(e.x, e.y);
if (columnIndex >= 0) {
m_ResizeColumnIndex = columnIndex;
m_ResizeColumnLeft = getColumnLeft(columnIndex);
return;
}
}
}
@Override
protected void onKeyDown(KeyEvent e) {
// Disable default event handling
}
}
}
|
package mingzuozhibi.action;
import mingzuozhibi.persist.Disc;
import mingzuozhibi.persist.Sakura;
import mingzuozhibi.persist.Sakura.ViewType;
import mingzuozhibi.support.JsonArg;
import org.hibernate.criterion.Restrictions;
import org.json.JSONArray;
import org.json.JSONObject;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.*;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
@RestController
public class SakuraController extends BaseController {
public static final String DISC_COLUMNS = "id,thisRank,prevRank,totalPt,title";
public static final String DISC_COLUMNS_ADMIN = "id,asic,thisRank,surplusDays,title";
@Transactional
@GetMapping(value = "/api/sakuras", produces = MEDIA_TYPE)
public String listSakura(
@RequestParam(name = "hasDiscs", defaultValue = "true") boolean hasDiscs,
@RequestParam(name = "discColumns", defaultValue = DISC_COLUMNS) String discColumns,
@RequestParam(name = "viewType", defaultValue = "SakuraList") ViewType viewType) {
JSONArray data = new JSONArray();
@SuppressWarnings("unchecked")
List<Sakura> sakuras = dao.query(session -> {
return session.createCriteria(Sakura.class)
.add(Restrictions.eq("viewType", viewType))
.add(Restrictions.eq("enabled", true))
.list();
});
Set<String> columns = Arrays.stream(discColumns.split(",")).collect(Collectors.toSet());
sakuras.forEach(sakura -> {
JSONObject object = sakura.toJSON();
if (hasDiscs) {
object.put("discs", buildDiscs(sakura, columns));
}
data.put(object);
});
if (LOGGER.isDebugEnabled()) {
debugRequest("[size={}][discColumns={}]", sakuras.size(), discColumns);
}
return objectResult(data);
}
@Transactional
@GetMapping(value = "/api/sakuras/{id}", produces = MEDIA_TYPE)
public String viewSakura(
@PathVariable("id") Long id,
@RequestParam(name = "hasDiscs", defaultValue = "true") boolean hasDiscs,
@RequestParam(name = "discColumns", defaultValue = DISC_COLUMNS) String discColumns) {
return responseViewSakura(dao.get(Sakura.class, id), hasDiscs, discColumns);
}
@Transactional
@GetMapping(value = "/api/sakuras/key/{key}", produces = MEDIA_TYPE)
public String viewSakuraByKey(
@PathVariable("key") String key,
@RequestParam(name = "hasDiscs", defaultValue = "true") boolean hasDiscs,
@RequestParam(name = "discColumns", defaultValue = DISC_COLUMNS) String discColumns) {
return responseViewSakura(dao.lookup(Sakura.class, "key", key), hasDiscs, discColumns);
}
private String responseViewSakura(Sakura sakura, boolean hasDiscs, String discColumns) {
if (sakura == null) {
return errorMessage("");
}
JSONObject object = sakura.toJSON();
if (hasDiscs) {
Set<String> columns = Arrays.stream(discColumns.split(",")).collect(Collectors.toSet());
object.put("discs", buildDiscs(sakura, columns));
}
if (LOGGER.isDebugEnabled()) {
debugRequest("[size={}][discColumns={}]", sakura.getDiscs().size(), discColumns);
}
return objectResult(object);
}
@Transactional
@GetMapping(value = "/api/basic/sakuras", produces = MEDIA_TYPE)
public String listAdminSakura() {
JSONArray array = new JSONArray();
dao.findAll(Sakura.class).forEach(sakura -> array.put(sakura.toJSON()));
if (LOGGER.isDebugEnabled()) {
debugRequest("[size={}]", array.length());
}
return objectResult(array);
}
@Transactional
@GetMapping(value = "/api/basic/sakuras/key/{key}", produces = MEDIA_TYPE)
public String viewAdminSakuraByKey(
@PathVariable("key") String key,
@RequestParam(name = "hasDiscs", defaultValue = "false") boolean hasDiscs,
@RequestParam(name = "discColumns", defaultValue = DISC_COLUMNS_ADMIN) String discColumns) {
return responseViewSakura(dao.lookup(Sakura.class, "key", key), hasDiscs, discColumns);
}
@Transactional
@PostMapping(value = "/api/basic/sakuras", produces = MEDIA_TYPE)
public String saveAdminSakura(
@JsonArg("$.key") String key,
@JsonArg("$.title") String title,
@JsonArg("$.viewType") ViewType viewType) {
if (dao.lookup(Sakura.class, "key", key) != null) {
return errorMessage("");
}
Sakura sakura = new Sakura(key, title, viewType);
dao.save(sakura);
if (LOGGER.isInfoEnabled()) {
infoRequest("[json={}]", sakura.toJSON());
}
return objectResult(sakura.toJSON());
}
private JSONArray buildDiscs(Sakura sakura, Set<String> columns) {
JSONArray discs = new JSONArray();
sakura.getDiscs().stream()
.filter(disc -> disc.getUpdateType() != Disc.UpdateType.None)
.forEach(disc -> discs.put(disc.toJSON(columns)));
return discs;
}
@Transactional
@PostMapping(value = "/api/basic/sakuras/{id}", produces = MEDIA_TYPE)
public String editAdminSakura(
@PathVariable("id") Long id,
@JsonArg("$.key") String key,
@JsonArg("$.title") String title,
@JsonArg("$.viewType") ViewType viewType,
@JsonArg("$.enabled") boolean enabled) {
Sakura sakura = dao.get(Sakura.class, id);
if (LOGGER.isDebugEnabled()) {
debugRequest("[Before:{}]", sakura.toJSON());
}
sakura.setKey(key);
sakura.setTitle(title);
sakura.setViewType(viewType);
sakura.setEnabled(enabled);
if (LOGGER.isDebugEnabled()) {
debugRequest("[Modify:{}]", sakura.toJSON());
}
return objectResult(sakura.toJSON());
}
}
|
package nu.validator.messages;
import org.xml.sax.SAXException;
import nu.validator.source.SourceHandler;
import nu.validator.xml.AttributesImpl;
import nu.validator.xml.XhtmlSaxEmitter;
public class XhtmlExtractHandler implements SourceHandler {
private static final char[] NEWLINE_SUBSTITUTE = { '\u21A9' };
private static final char[] ELLIPSIS = { '\u2026' };
private final XhtmlSaxEmitter emitter;
private static final AttributesImpl LINE_BREAK_ATTRS = new AttributesImpl();
static {
LINE_BREAK_ATTRS.addAttribute("class", "lf");
LINE_BREAK_ATTRS.addAttribute("title", "Line break");
}
/**
* @param emitter
*/
public XhtmlExtractHandler(final XhtmlSaxEmitter emitter) {
this.emitter = emitter;
}
public void characters(char[] ch, int start, int length)
throws SAXException {
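// Long extracts are elided: for 200 or more characters, emit the first 100,
// a visual ellipsis wrapped in a "snip" span, then the last 100 characters.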
if (length < 200) {
emitter.characters(ch, start, length);
} else {
emitter.characters(ch, start, 100);
emitter.startElementWithClass("span", "snip");
emitter.characters(ELLIPSIS);
emitter.endElement("span");
emitter.characters(ch, start + length - 100, 100);
}
}
public void endSource() throws SAXException {
}
public void endCharHilite() throws SAXException {
emitter.endElement("b");
}
public void endRange() throws SAXException {
emitter.endElement("b");
}
public void newLine() throws SAXException {
emitter.startElement("span", LINE_BREAK_ATTRS);
emitter.characters(NEWLINE_SUBSTITUTE);
emitter.endElement("span");
}
public void startSource() throws SAXException {
}
public void startCharHilite(int oneBasedLine, int oneBasedColumn)
throws SAXException {
emitter.startElement("b");
}
public void startRange(int oneBasedLine, int oneBasedColumn)
throws SAXException {
emitter.startElement("b");
}
}
|
package name.ignat.commons.utils;
/**
* Object-related utility methods.
*
* @author Dan Ignat
*/
public final class ObjectUtils
{
/**
* Returns {@code true} <em>iff</em> any of the {@code candidates} are equal to {@code object} via {@link
* Object#equals(Object)}.
*/
@SafeVarargs
public static <T> boolean equalsAny(T object, T... candidates)
{
if (object == null || candidates == null)
{
return false;
}
for (T candidate : candidates)
{
if (object.equals(candidate))
{
return true;
}
}
return false;
}
private ObjectUtils() { }
}
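// Usage sketch (not part of the original source): a hypothetical harness showing how
// equalsAny behaves, including its null handling; the class name and values are
// illustrative only.
class ObjectUtilsExample
{
public static void main(String[] args)
{
System.out.println(ObjectUtils.equalsAny("b", "a", "b", "c")); // true
System.out.println(ObjectUtils.equalsAny("d", "a", "b", "c")); // false
System.out.println(ObjectUtils.equalsAny(null, "a", "b")); // false (null object)
System.out.println(ObjectUtils.equalsAny("a", (String[]) null)); // false (null candidates)
}
}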
|
package net.floodlightcontroller.packet;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import org.projectfloodlight.openflow.types.IPv6Address;
import org.projectfloodlight.openflow.types.IpProtocol;
/**
* @author Jacob Chappell (jacob.chappell@uky.edu)
*/
public class IPv6 extends BasePacket {
public static Map<IpProtocol, Class<? extends IPacket>> nextHeaderClassMap;
static {
nextHeaderClassMap = new HashMap<IpProtocol, Class<? extends IPacket>>();
// TODO: Add ICMPv6, IPv6 Options, etc..
nextHeaderClassMap.put(IpProtocol.TCP, TCP.class);
nextHeaderClassMap.put(IpProtocol.UDP, UDP.class);
}
public static final int HEADER_LENGTH = 40;
protected byte version;
protected byte trafficClass;
protected int flowLabel;
protected short payloadLength;
protected IpProtocol nextHeader;
protected byte hopLimit;
protected IPv6Address sourceAddress;
protected IPv6Address destinationAddress;
public IPv6() {
super();
this.version = 6;
nextHeader = IpProtocol.NONE;
sourceAddress = IPv6Address.NONE;
destinationAddress = IPv6Address.NONE;
}
public byte getVersion() {
return version;
}
public IPv6 setVersion(byte version) {
this.version = version;
return this;
}
public byte getTrafficClass() {
return trafficClass;
}
public IPv6 setTrafficClass(byte trafficClass) {
this.trafficClass = trafficClass;
return this;
}
public int getFlowLabel() {
return flowLabel;
}
public IPv6 setFlowLabel(int flowLabel) {
this.flowLabel = flowLabel;
return this;
}
public short getPayloadLength() {
return payloadLength;
}
public IPv6 setPayloadLength(short payloadLength) {
this.payloadLength = payloadLength;
return this;
}
public IpProtocol getNextHeader() {
return nextHeader;
}
public IPv6 setNextHeader(IpProtocol nextHeader) {
this.nextHeader = nextHeader;
return this;
}
public byte getHopLimit() {
return hopLimit;
}
public IPv6 setHopLimit(byte hopLimit) {
this.hopLimit = hopLimit;
return this;
}
public IPv6Address getSourceAddress() {
return sourceAddress;
}
public IPv6 setSourceAddress(IPv6Address sourceAddress) {
this.sourceAddress = sourceAddress;
return this;
}
public IPv6Address getDestinationAddress() {
return destinationAddress;
}
public IPv6 setDestinationAddress(IPv6Address destinationAddress) {
this.destinationAddress = destinationAddress;
return this;
}
@Override
public byte[] serialize() {
// Get the raw bytes of the payload we encapsulate.
byte[] payloadData = null;
if (this.payload != null) {
this.payload.setParent(this);
payloadData = this.payload.serialize();
}
// Update our internal payload length.
this.payloadLength = (short) ((payloadData != null) ? payloadData.length : 0);
// Create a byte buffer to hold the IPv6 packet structure.
byte[] data = new byte[HEADER_LENGTH + this.payloadLength];
ByteBuffer bb = ByteBuffer.wrap(data);
// Add header fields to the byte buffer in the correct order.
// Fear not the bit magic that must occur.
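// Byte layout: byte 0 = version (high nibble) | upper 4 bits of the traffic class;
// byte 1 = lower 4 bits of the traffic class | top 4 bits of the 20-bit flow label;
// bytes 2-3 = the remaining 16 bits of the flow label.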
bb.put((byte) (((this.version & 0xF) << 4) |
((this.trafficClass & 0xF0) >>> 4)));
bb.put((byte) (((this.trafficClass & 0xF) << 4) |
((this.flowLabel & 0xF0000) >>> 16)));
bb.putShort((short) (this.flowLabel & 0xFFFF));
bb.putShort(this.payloadLength);
bb.put((byte) this.nextHeader.getIpProtocolNumber());
bb.put(this.hopLimit);
bb.put(this.sourceAddress.getBytes());
bb.put(this.destinationAddress.getBytes());
// Add the payload to the byte buffer, if necessary.
if (payloadData != null)
bb.put(payloadData);
// We're done! Return the data.
return data;
}
@Override
public IPacket deserialize(byte[] data, int offset, int length)
throws PacketParsingException {
// Wrap the data in a byte buffer for easier retrieval.
ByteBuffer bb = ByteBuffer.wrap(data, offset, length);
// Retrieve values from IPv6 header.
byte firstByte = bb.get();
byte secondByte = bb.get();
this.version = (byte) ((firstByte & 0xF0) >>> 4);
if (this.version != 6) {
throw new PacketParsingException(
"Invalid version for IPv6 packet: " +
this.version);
}
this.trafficClass = (byte) (((firstByte & 0xF) << 4) |
((secondByte & 0xF0) >>> 4));
this.flowLabel = ((secondByte & 0xF) << 16) |
(bb.getShort() & 0xFFFF);
this.payloadLength = bb.getShort();
this.nextHeader = IpProtocol.of(bb.get()); // TODO: U8.f()?
this.hopLimit = bb.get();
byte[] sourceAddress = new byte[16];
bb.get(sourceAddress, 0, 16);
byte[] destinationAddress = new byte[16];
bb.get(destinationAddress, 0, 16);
this.sourceAddress = IPv6Address.of(sourceAddress);
this.destinationAddress = IPv6Address.of(destinationAddress);
// Retrieve the payload, if possible.
IPacket payload;
if (IPv6.nextHeaderClassMap.containsKey(this.nextHeader)) {
Class<? extends IPacket> clazz = IPv6.nextHeaderClassMap.get(this.nextHeader);
try {
payload = clazz.newInstance();
} catch (Exception e) {
throw new RuntimeException("Error parsing payload for IPv6 packet", e);
}
} else {
payload = new Data();
}
// Deserialize as much of the payload as we can (hopefully all of it).
this.payload = payload.deserialize(data, bb.position(),
Math.min(this.payloadLength, bb.limit() - bb.position()));
this.payload.setParent(this);
// We're done!
return this;
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime
* result
+ ((destinationAddress == null) ? 0 : destinationAddress
.hashCode());
result = prime * result + flowLabel;
result = prime * result + hopLimit;
result = prime * result
+ ((nextHeader == null) ? 0 : nextHeader.hashCode());
result = prime * result + payloadLength;
result = prime * result
+ ((sourceAddress == null) ? 0 : sourceAddress.hashCode());
result = prime * result + trafficClass;
result = prime * result + version;
return result;
}
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (!super.equals(obj))
return false;
if (!(obj instanceof IPv6))
return false;
IPv6 other = (IPv6) obj;
if (destinationAddress == null) {
if (other.destinationAddress != null)
return false;
} else if (!destinationAddress.equals(other.destinationAddress))
return false;
if (flowLabel != other.flowLabel)
return false;
if (hopLimit != other.hopLimit)
return false;
if (nextHeader == null) {
if (other.nextHeader != null)
return false;
} else if (!nextHeader.equals(other.nextHeader))
return false;
if (payloadLength != other.payloadLength)
return false;
if (sourceAddress == null) {
if (other.sourceAddress != null)
return false;
} else if (!sourceAddress.equals(other.sourceAddress))
return false;
if (trafficClass != other.trafficClass)
return false;
if (version != other.version)
return false;
return true;
}
}
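// Round-trip sketch (not part of the original source): builds a minimal IPv6 header with
// no payload, serializes it, and parses it back. Field values and the example addresses
// are illustrative; only setters defined in this file and IPv6Address.of(...) are used.
class IPv6Example {
public static void main(String[] args) throws PacketParsingException {
IPv6 packet = new IPv6()
.setTrafficClass((byte) 0)
.setFlowLabel(0x12345)
.setHopLimit((byte) 64)
.setNextHeader(IpProtocol.NONE)
.setSourceAddress(IPv6Address.of("2001:db8::1"))
.setDestinationAddress(IPv6Address.of("2001:db8::2"));
byte[] wire = packet.serialize(); // expected: 40-byte header, zero-length payload
IPv6 parsed = (IPv6) new IPv6().deserialize(wire, 0, wire.length);
System.out.println(wire.length); // expected: 40
System.out.println(parsed.getHopLimit()); // expected: 64
System.out.println(parsed.getSourceAddress()); // expected: 2001:db8::1
}
}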
|
package net.masterthought.cucumber.json;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.ArrayUtils;
import com.google.gson.JsonElement;
import net.masterthought.cucumber.json.support.ResultsWithMatch;
import net.masterthought.cucumber.json.support.Status;
import net.masterthought.cucumber.util.Util;
public class Step implements ResultsWithMatch {
// Start: attributes from JSON file report
private String name = null;
private final String keyword = null;
private final String line = null;
private final Result result = null;
private final Row[] rows = new Row[0];
private final Match match = null;
private final Embedded[] embeddings = new Embedded[0];
private final JsonElement[] output = new JsonElement[0];
private final DocString doc_string = null;
// End: attributes from JSON file report
private static int labelCount = 0;
private String attachments;
private String[] outputs;
private Status status;
public Row[] getRows() {
return rows;
}
public String[] getOutput() {
return outputs;
}
@Override
public Match getMatch() {
return match;
}
@Override
public Result getResult() {
return result;
}
@Override
public Embedded[] getEmbeddings() {
return embeddings;
}
public boolean hasRows() {
return ArrayUtils.isNotEmpty(rows);
}
public Status getStatus() {
return status;
}
public long getDuration() {
return result == null ? 0L : result.getDuration();
}
public String getRawName() {
return name;
}
public String getDetails() {
Status status = getStatus();
String errorMessage = null;
switch (status) {
case FAILED:
errorMessage = result.getErrorMessage();
return getStatusDetails(status, errorMessage);
case MISSING:
errorMessage = "<span class=\"missing\">Result was missing for this step</span>";
return getStatusDetails(status, errorMessage);
default:
return getStatusDetails(status, null);
}
}
private String getStatusDetails(Status status, String errorMessage) {
StringBuilder sb = new StringBuilder();
sb.append("<div class=\"").append(status.getName().toLowerCase()).append("\">");
sb.append("<span class=\"step-keyword\">").append(keyword).append(" </span>");
sb.append("<span class=\"step-name\">");
// for keyword == Before|After attribute 'name' is not available
if (StringUtils.isNotBlank(name)) {
sb.append(StringEscapeUtils.escapeHtml(name));
}
sb.append("</span>");
sb.append("<span class=\"step-duration\">");
if (status != Status.MISSING) {
sb.append(Util.formatDuration(result.getDuration()));
}
sb.append("</span>");
if (StringUtils.isNotBlank(errorMessage)) {
// if the result is not available take a hash of message reference - not perfect but still better than -1
int id = result != null ? result.hashCode() : errorMessage.hashCode();
sb.append(Util.formatErrorMessage(errorMessage, id));
}
sb.append("</div>");
sb.append(getAttachments());
return sb.toString();
}
/**
* Returns a formatted doc-string section, rendered with respect to the parent Step element. Line breaks and
* whitespace are preserved so that examples keep their original formatting.
*
* @return string of html
*/
public String getDocString() {
if (doc_string == null || !doc_string.hasValue()) {
return "";
}
return "<div class=\"" + getStatus().getName().toLowerCase() + "\">" + "<div class=\"doc-string\">"
+ doc_string.getEscapedValue() + "</div></div>";
}
@Override
public String getAttachments() {
return attachments;
}
public void setMedaData(Scenario scenario) {
calculateAttachments();
calculateOutputs();
calculateStatus();
}
private void calculateAttachments() {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < embeddings.length; i++) {
sb.append(embeddings[i].render(i));
}
attachments = sb.toString();
}
private void calculateOutputs() {
List<String> list = new ArrayList<>();
for (JsonElement element : this.output) {
if (element.isJsonPrimitive() && element.getAsJsonPrimitive().isString()) {
String elementString = element.getAsString();
list.add(StringEscapeUtils.escapeHtml(elementString));
} else {
String elementString = element.toString();
list.add(StringEscapeUtils.escapeHtml(elementString));
}
}
outputs = list.toArray(new String[list.size()]);
}
private void calculateStatus() {
if (result == null) {
status = Status.MISSING;
} else {
status = Status.valueOf(result.getStatus().toUpperCase());
}
}
}
|
/*
* This class runs the resolvers in opennlp.textgrounder.resolver
*/
package opennlp.textgrounder.app;
import opennlp.textgrounder.resolver.*;
import opennlp.textgrounder.text.*;
import opennlp.textgrounder.text.io.*;
import opennlp.textgrounder.text.prep.*;
import opennlp.textgrounder.topo.gaz.*;
import opennlp.textgrounder.eval.*;
import opennlp.textgrounder.util.*;
import java.io.*;
import java.util.*;
import java.util.zip.*;
public class RunResolver extends BaseApp {
public static void main(String[] args) throws Exception {
initializeOptionsFromCommandLine(args);
Tokenizer tokenizer = new OpenNLPTokenizer();
//GeoNamesGazetteer gnGaz = new GeoNamesGazetteer(new BufferedReader(
// new FileReader(Constants.getGazetteersDir() + File.separator + "allCountries.txt")));
StoredCorpus testCorpus = Corpus.createStoredCorpus();
System.out.print("Reading corpus from " + getInputPath() + " ...");
testCorpus.addSource(new TrXMLDirSource(new File(getInputPath()), tokenizer));
//testCorpus.addSource(new ToponymAnnotator(new ToponymRemover(new TrXMLDirSource(new File(getInputPath()), tokenizer)),
// new OpenNLPRecognizer(), gnGaz));
testCorpus.load();
System.out.println("done.");
//System.exit(0);
StoredCorpus trainCorpus = Corpus.createStoredCorpus();
if(getAdditionalInputPath() != null) {
System.out.print("Reading additional training corpus from " + getAdditionalInputPath() + " ...");
List<Gazetteer> gazList = new ArrayList<Gazetteer>();
LoadableGazetteer trGaz = new InMemoryGazetteer();
trGaz.load(new CorpusGazetteerReader(testCorpus));
LoadableGazetteer otherGaz = new InMemoryGazetteer();
otherGaz.load(new WorldReader(new File(Constants.getGazetteersDir() + File.separator + "dataen-fixed.txt.gz")));
gazList.add(trGaz);
gazList.add(otherGaz);
Gazetteer multiGaz = new MultiGazetteer(gazList);
/*trainCorpus.addSource(new ToponymAnnotator(new PlainTextSource(
new BufferedReader(new FileReader(getAdditionalInputPath())), new OpenNLPSentenceDivider(), tokenizer),
new OpenNLPRecognizer(),
multiGaz));*/
trainCorpus.addSource(new ToponymAnnotator(new GigawordSource(
new BufferedReader(new InputStreamReader(
new GZIPInputStream(new FileInputStream(getAdditionalInputPath())))), 10, 40000),
new OpenNLPRecognizer(),
multiGaz));
trainCorpus.addSource(new TrXMLDirSource(new File(getInputPath()), tokenizer));
trainCorpus.load();
System.out.println("done.");
}
System.out.println("\nNumber of documents: " + testCorpus.getDocumentCount());
System.out.println("Number of toponym types: " + testCorpus.getToponymTypeCount());
System.out.println("Maximum ambiguity (locations per toponym): " + testCorpus.getMaxToponymAmbiguity() + "\n");
Resolver resolver;
if(getResolverType() == RESOLVER_TYPE.RANDOM) {
System.out.print("Running RANDOM resolver...");
resolver = new RandomResolver();
}
else if(getResolverType() == RESOLVER_TYPE.WEIGHTED_MIN_DIST) {
System.out.print("Running WEIGHTED MINIMUM DISTANCE resolver with " + getNumIterations() + " iteration(s)...");
resolver = new WeightedMinDistResolver(getNumIterations());
}
else if(getResolverType() == RESOLVER_TYPE.LABEL_PROP_DEFAULT_RULE) {
System.out.print("Running LABEL PROP DEFAULT RULE resolver, using graph at " + getGraphInputPath() + " ...");
resolver = new LabelProcDefaultRuleResolver(getGraphInputPath());
}
else if(getResolverType() == RESOLVER_TYPE.LABEL_PROP_CONTEXT_SENSITIVE) {
System.out.print("Running LABEL PROP CONTEXT SENSITIVE resolver, using graph at " + getGraphInputPath() + " ...");
resolver = new LabelPropContextSensitiveResolver(getGraphInputPath());
}
else {//if(getResolverType() == RESOLVER_TYPE.BASIC_MIN_DIST) {
System.out.print("Running BASIC MINIMUM DISTANCE resolver...");
resolver = new BasicMinDistResolver();
}
if(getAdditionalInputPath() != null)
resolver.train(trainCorpus);
Corpus disambiguated = resolver.disambiguate(testCorpus);
System.out.println("done.");
System.out.print("\nEvaluating...");
Evaluator evaluator = new SignatureEvaluator(testCorpus);
Report report = evaluator.evaluate(disambiguated, false);
System.out.println("done.");
System.out.println("\nResults:");
System.out.println("P: " + report.getPrecision());
System.out.println("R: " + report.getRecall());
System.out.println("F: " + report.getFScore());
System.out.println("A: " + report.getAccuracy() + "\n");
if(getOutputPath() != null) {
System.out.print("Writing resolved corpus in XML format to " + getOutputPath() + " ...");
CorpusXMLWriter w = new CorpusXMLWriter(disambiguated);
w.write(new File(getOutputPath()));
System.out.println("done.");
}
if(getKMLOutputPath() != null) {
System.out.print("Writing visualizable resolved corpus in KML format to " + getKMLOutputPath() + " ...");
CorpusKMLWriter kw = new CorpusKMLWriter(disambiguated);
kw.write(new File(getKMLOutputPath()));
System.out.println("done.");
}
}
}
|
package org.agmip.functions;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import org.agmip.ace.util.AcePathfinderUtil;
import org.agmip.common.Event;
import static org.agmip.common.Functions.*;
import org.agmip.common.Functions.CompareMode;
import static org.agmip.functions.SoilHelper.*;
import org.agmip.util.MapUtil;
import static org.agmip.util.MapUtil.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Provide static functions for experiment data handling
*
* @author Meng Zhang
* @version 0.1
*/
public class ExperimentHelper {
private static final Logger LOG = LoggerFactory.getLogger(ExperimentHelper.class);
/**
* This function will calculate the planting date which is the first date
* within the planting window<br/> that has an accumulated rainfall amount
* of at least P (mm) over the previous n days.
*
* @param data The HashMap of experiment (including weather data)
* @param eDate Earliest planting date (mm-dd or mmdd)
* @param lDate Latest planting date (mm-dd or mmdd)
* @param rain Threshold rainfall amount (mm)
* @param days Number of days of accumulation
*
* @return An {@code ArrayList} of {@code pdate} for each year in the
* weather data.
*/
public static HashMap<String, ArrayList<String>> getAutoPlantingDate(HashMap data, String eDate, String lDate, String rain, String days) {
Map wthData;
ArrayList<Map> dailyData;
ArrayList<HashMap<String, String>> eventData;
Event event;
Calendar eDateCal = Calendar.getInstance();
Calendar lDateCal = Calendar.getInstance();
int intDays;
int duration;
double accRainAmtTotal;
double accRainAmt;
int expDur;
int startYear = 0;
Window[] windows;
ArrayList<String> pdates = new ArrayList<String>();
HashMap<String, ArrayList<String>> results = new HashMap<String, ArrayList<String>>();
// Check input dates
if (!isValidDate(eDate, eDateCal, "-")) {
LOG.error("INVALID EARLIST DATE:[" + eDate + "]");
return new HashMap<String, ArrayList<String>>();
}
if (!isValidDate(lDate, lDateCal, "-")) {
LOG.error("INVALID LATEST DATE:[" + lDate + "]");
return new HashMap<String, ArrayList<String>>();
}
if (eDateCal.after(lDateCal)) {
lDateCal.set(Calendar.YEAR, lDateCal.get(Calendar.YEAR) + 1);
}
duration = (int) ((lDateCal.getTimeInMillis() - eDateCal.getTimeInMillis()) / 86400000);
// Check Number of days of accumulation
try {
intDays = Integer.parseInt(days);
} catch (Exception e) {
LOG.error("INVALID NUMBER FOR NUMBER OF DAYS OF ACCUMULATION");
return new HashMap<String, ArrayList<String>>();
}
if (intDays <= 0) {
LOG.error("NON-POSITIVE NUMBER FOR NUMBER OF DAYS OF ACCUMULATION");
return new HashMap<String, ArrayList<String>>();
}
// Check Threshold rainfall amount
try {
accRainAmtTotal = Double.parseDouble(rain);
} catch (Exception e) {
LOG.error("INVALID NUMBER FOR THRESHOLD RAINFALL AMOUNT");
return new HashMap<String, ArrayList<String>>();
}
if (accRainAmtTotal <= 0) {
LOG.error("NON-POSITIVE NUMBER FOR THRESHOLD RAINFALL AMOUNT");
return new HashMap<String, ArrayList<String>>();
}
// Validation for input parameters
// Weather data check and try to get daily data
dailyData = WeatherHelper.getDailyData(data);
if (dailyData.isEmpty()) {
LOG.error("EMPTY DAILY WEATHER DATA.");
return new HashMap<String, ArrayList<String>>();
}
// Check experiment data
// Case for multiple data json structure
Map mgnData = getObjectOr(data, "management", new HashMap());
eventData = getObjectOr(mgnData, "events", new ArrayList());
// Remove all planting events, for now, as a default, because this function generates new replacement planting events.
// NOTE: This is the "safest" way to remove items during iteration.
// Iterator<HashMap<String, String>> iter = eventData.iterator();
// while (iter.hasNext()) {
// if (getValueOr(iter.next(), "event", "").equals("planting")) {
// iter.remove();
// Check whether EXP_DUR is available
try {
expDur = Integer.parseInt(getValueOr(data, "exp_dur", "1"));
} catch (Exception e) {
expDur = 1;
}
LOG.debug("EXP_DUR FOUND: {}", expDur);
// The starting year for multiple year runs may be set with SC_YEAR.
if (expDur > 1) {
try {
startYear = Integer.parseInt(getValueOr(data, "sc_year", "").substring(0, 4));
} catch (Exception e) {
startYear = 0;
}
}
LOG.debug("START YEAR: {}", startYear);
windows = new Window[expDur];
// Check if there is eventData existing
if (eventData.isEmpty()) {
LOG.warn("EMPTY EVENT DATA.");
event = new Event(new ArrayList(), "planting");
} else {
event = new Event(eventData, "planting");
// If only one year is to be simulated, the recorded planting date year will be used (if available).
if (expDur == 1) {
if (event.isEventExist()) {
Map plEvent = event.getCurrentEvent();
try {
startYear = Integer.parseInt(getValueOr(plEvent, "date", "").substring(0, 4));
} catch (Exception e) {
startYear = -99;
}
} else {
startYear = -99;
}
}
}
int startYearIndex = getStartYearIndex(dailyData, startYear);
// If start year is out of weather data range
if (startYearIndex == dailyData.size()) {
// If one year duration, then use the first year
if (expDur == 1) {
startYearIndex = 0;
} // If multiple year duration, then report error and end function
else {
LOG.error("THE START YEAR IS OUT OF DATA RANGE (SC_YEAR:[" + startYear + "]");
return new HashMap<String, ArrayList<String>>();
}
}
// Find the first record which is the earliest date for the window in each year
int end;
int start = getDailyRecIndex(dailyData, eDate, startYearIndex, 0);
for (int i = 0; i < windows.length; i++) {
end = getDailyRecIndex(dailyData, lDate, start, duration);
windows[i] = new Window(start, end);
if (i + 1 < windows.length) {
start = getDailyRecIndex(dailyData, eDate, end, 365 - duration);
}
}
if (windows[0].start == dailyData.size()) {
LOG.warn("NO VALID DAILY DATA FOR SEARCH WINDOW");
// return new HashMap<String, ArrayList<String>>();
}
// Loop each window to try to find appropriate planting date
for (int i = 0; i < windows.length; i++) {
// Check first n days
int last = Math.min(windows[i].start + intDays, windows[i].end);
accRainAmt = 0;
for (int j = windows[i].start; j < last; j++) {
try {
accRainAmt += Double.parseDouble(getValueOr(dailyData.get(j), "rain", "0"));
} catch (Exception e) {
continue;
}
if (accRainAmt >= accRainAmtTotal) {
LOG.debug("1: " + getValueOr(dailyData.get(j), "w_date", "") + " : " + accRainAmt + ", " + (accRainAmt >= accRainAmtTotal));
//event.updateEvent("date", getValueOr(dailyData.get(j), "w_date", ""));
//AcePathfinderUtil.insertValue((HashMap)data, "pdate", getValueOr(dailyData.get(j), "w_date", ""));
pdates.add(getValueOr(dailyData.get(j), "w_date", ""));
break;
}
}
if (accRainAmt >= accRainAmtTotal) {
continue;
}
// // If the window size is smaller than n
// if (last > windows[i].end) {
// LOG.info("NO APPROPRIATE DATE WAS FOUND FOR NO." + (i + 1) + " PLANTING EVENT");
// // TODO remove one planting event
// // event.removeEvent();
// Check following days
int outIndex = last;
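// Rolling window: for each new day, drop the rainfall of the day that falls out of
// the n-day window and add the new day's rainfall, so the accumulated amount always
// covers the most recent n days.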
for (int j = last; j < windows[i].end; j++) {
try {
accRainAmt -= Double.parseDouble(getValueOr(dailyData.get(j - intDays), "rain", "0"));
accRainAmt += Double.parseDouble(getValueOr(dailyData.get(j), "rain", "0"));
} catch (Exception e) {
continue;
}
if (accRainAmt >= accRainAmtTotal) {
LOG.debug("2:" + getValueOr(dailyData.get(j), "w_date", "") + " : " + accRainAmt + ", " + (accRainAmt >= accRainAmtTotal));
//event.updateEvent("date", getValueOr(dailyData.get(j), "w_date", ""));
//AcePathfinderUtil.insertValue((HashMap)data, "pdate", getValueOr(dailyData.get(j), "w_date", ""));
pdates.add(getValueOr(dailyData.get(j), "w_date", ""));
break;
}
outIndex++;
}
if (accRainAmt < accRainAmtTotal) {
String lastDay = getValueOr(dailyData.get(windows[i].end - 1), "w_date", "");
LOG.error("Could not find an appropriate day to plant, using {}", lastDay);
pdates.add(lastDay);
}
}
results.put("pdate", pdates);
return results;
}
/**
* This function will calculate the planting date which is the first date
* within the planting window<br/> that has an accumulated rainfall amount
* of at least P (mm) over the previous n days. The calculation is only done
* when the planting date is missing in the valid planting event.
*
* @param data The HashMap of experiment (including weather data)
* @param eDate Earliest planting date (mm-dd or mmdd)
* @param lDate Latest planting date (mm-dd or mmdd)
* @param rain Threshold rainfall amount (mm)
* @param days Number of days of accumulation
*
* @return An {@code ArrayList} of {@code pdate} for each year in the
* weather data.
*/
public static HashMap<String, ArrayList<String>> getAutoFillPlantingDate(HashMap data, String eDate, String lDate, String rain, String days) {
Map wthData;
ArrayList<Map> dailyData;
ArrayList<HashMap<String, String>> eventData;
Event event;
Calendar eDateCal = Calendar.getInstance();
Calendar lDateCal = Calendar.getInstance();
int intDays;
int duration;
double accRainAmtTotal;
double accRainAmt;
// int expDur;
int startYear = 0;
ArrayList<Window> windows = new ArrayList<Window>();
ArrayList<String> pdates = new ArrayList<String>();
HashMap<String, ArrayList<String>> results = new HashMap<String, ArrayList<String>>();
// Check input dates
if (!isValidDate(eDate, eDateCal, "-")) {
LOG.error("INVALID EARLIST DATE:[" + eDate + "]");
return new HashMap<String, ArrayList<String>>();
}
if (!isValidDate(lDate, lDateCal, "-")) {
LOG.error("INVALID LATEST DATE:[" + lDate + "]");
return new HashMap<String, ArrayList<String>>();
}
if (eDateCal.after(lDateCal)) {
lDateCal.set(Calendar.YEAR, lDateCal.get(Calendar.YEAR) + 1);
}
duration = (int) ((lDateCal.getTimeInMillis() - eDateCal.getTimeInMillis()) / 86400000);
// Check Number of days of accumulation
try {
intDays = Integer.parseInt(days);
} catch (Exception e) {
LOG.error("INVALID NUMBER FOR NUMBER OF DAYS OF ACCUMULATION");
return new HashMap<String, ArrayList<String>>();
}
if (intDays <= 0) {
LOG.error("NON-POSITIVE NUMBER FOR NUMBER OF DAYS OF ACCUMULATION");
return new HashMap<String, ArrayList<String>>();
}
// Check Threshold rainfall amount
try {
accRainAmtTotal = Double.parseDouble(rain);
} catch (Exception e) {
LOG.error("INVALID NUMBER FOR THRESHOLD RAINFALL AMOUNT");
return new HashMap<String, ArrayList<String>>();
}
if (accRainAmtTotal <= 0) {
LOG.error("NON-POSITIVE NUMBER FOR THRESHOLD RAINFALL AMOUNT");
return new HashMap<String, ArrayList<String>>();
}
// Validation for input parameters
// Weather data check and try to get daily data
dailyData = WeatherHelper.getDailyData(data);
if (dailyData.isEmpty()) {
LOG.error("EMPTY DAILY WEATHER DATA.");
return new HashMap<String, ArrayList<String>>();
}
// Check experiment data
// Case for multiple data json structure
Map mgnData = getObjectOr(data, "management", new HashMap());
eventData = getObjectOr(mgnData, "events", new ArrayList());
// Check if there is eventData existing and if PDATE is already available
if (eventData.isEmpty()) {
LOG.warn("EMPTY EVENT DATA");
// event = new Event(new ArrayList(), "planting");
return new HashMap<String, ArrayList<String>>();
} else {
event = new Event(eventData, "planting");
if (event.isEventExist()) {
try {
startYear = Integer.parseInt(getValueOr(data, "sc_year", "").substring(0, 4));
} catch (Exception e) {
startYear = -99;
}
int startYearIndex = getStartYearIndex(dailyData, startYear);
// Find the first record which is the earliest date for the window in each year
// Currently only a single planting event is supported, so no loop handling here
int start = getDailyRecIndex(dailyData, eDate, startYearIndex, 0);
int end = getDailyRecIndex(dailyData, lDate, start, duration);
// startYearIndex = getDailyRecIndex(dailyData, eDate, end, 365 - duration);
Map plEvent = event.getCurrentEvent();
String pdate = getValueOr(plEvent, "date", "");
if (!pdate.equals("")) {
LOG.info("Find oringal PDATE {}, NO calculation required, AUTO_PDATE() exist", pdate);
return new HashMap<String, ArrayList<String>>();
} else {
windows.add(new Window(start, end));
}
}
}
if (windows.get(0).start == dailyData.size()) {
LOG.warn("NO VALID DAILY DATA FOR SEARCH WINDOW");
// return new HashMap<String, ArrayList<String>>();
}
// Loop each window to try to find appropriate planting date
for (int i = 0; i < windows.size(); i++) {
// Check first n days
int last = Math.min(windows.get(i).start + intDays, windows.get(i).end);
accRainAmt = 0;
for (int j = windows.get(i).start; j < last; j++) {
try {
accRainAmt += Double.parseDouble(getValueOr(dailyData.get(j), "rain", "0"));
} catch (Exception e) {
continue;
}
if (accRainAmt >= accRainAmtTotal) {
LOG.debug("1: " + getValueOr(dailyData.get(j), "w_date", "") + " : " + accRainAmt + ", " + (accRainAmt >= accRainAmtTotal));
//event.updateEvent("date", getValueOr(dailyData.get(j), "w_date", ""));
//AcePathfinderUtil.insertValue((HashMap)data, "pdate", getValueOr(dailyData.get(j), "w_date", ""));
pdates.add(getValueOr(dailyData.get(j), "w_date", ""));
break;
}
}
if (accRainAmt >= accRainAmtTotal) {
continue;
}
// Check following days
int outIndex = last;
for (int j = last; j < windows.get(i).end; j++) {
try {
accRainAmt -= Double.parseDouble(getValueOr(dailyData.get(j - intDays), "rain", "0"));
accRainAmt += Double.parseDouble(getValueOr(dailyData.get(j), "rain", "0"));
} catch (Exception e) {
continue;
}
if (accRainAmt >= accRainAmtTotal) {
LOG.debug("2:" + getValueOr(dailyData.get(j), "w_date", "") + " : " + accRainAmt + ", " + (accRainAmt >= accRainAmtTotal));
//event.updateEvent("date", getValueOr(dailyData.get(j), "w_date", ""));
//AcePathfinderUtil.insertValue((HashMap)data, "pdate", getValueOr(dailyData.get(j), "w_date", ""));
pdates.add(getValueOr(dailyData.get(j), "w_date", ""));
break;
}
outIndex++;
}
if (accRainAmt < accRainAmtTotal) {
String lastDay = getValueOr(dailyData.get(windows.get(i).end - 1), "w_date", "");
LOG.error("Could not find an appropriate day to plant, using {}", lastDay);
pdates.add(lastDay);
}
}
results.put("pdate", pdates);
return results;
}
/**
* Store a start index and end index of daily data array for a window
*/
private static class Window {
public int start;
public int end;
public Window(int start, int end) {
this.start = start;
this.end = end;
}
}
/**
* To check if the input date string is valid and matches the required
* format
*
* @param date The input date string, which should come in the format
* yyyy-mm-dd; the separator should match the third parameter
* @param out The Calendar instance which will be assigned with input year,
* month and day
* @param separator The separator string used in date format
* @return check result
*/
private static boolean isValidDate(String date, Calendar out, String separator) {
try {
String[] dates = date.split(separator);
out.set(Calendar.DATE, Integer.parseInt(dates[dates.length - 1]));
out.set(Calendar.MONTH, Integer.parseInt(dates[dates.length - 2]) - 1);
if (dates.length > 2) {
out.set(Calendar.YEAR, Integer.parseInt(dates[dates.length - 3]));
}
} catch (Exception e) {
try {
out.set(Calendar.DATE, Integer.parseInt(date.substring(date.length() - 2, date.length())));
out.set(Calendar.MONTH, Integer.parseInt(date.substring(date.length() - 4, date.length() - 2)) - 1);
if (date.length() > 4) {
out.set(Calendar.YEAR, Integer.parseInt(date.substring(date.length() - 8, date.length() - 4)));
}
} catch (Exception e2) {
return false;
}
}
return true;
}
/**
* To check whether two input date strings represent the same date, regardless
* of the 2nd input's separator
*
* @param date1 1st input date string with format yyyymmdd
* @param date2 2nd input date string with format mmdd or mm-dd
* @param separator The separator used in 2nd string
* @return comparison result
*/
private static boolean isSameDate(String date1, String date2, String separator) {
date2 = date2.replace(separator, "");
if (date2.equals("0229")) {
try {
int year1 = Integer.parseInt(date1.substring(2, 4));
if (year1 % 4 != 0) {
return date1.endsWith("0228");
}
} catch (Exception e) {
return false;
}
}
return date1.endsWith(date2);
}
/**
* Find the index of daily data array for the particular date
*
* @param dailyData The array of daily data
* @param findDate The expected date
* @param start The start index for searching
* @param expectedDiff The default difference between start index and
* expected index (will try this index first, if failed then start loop)
* @return The index for the expected date, if no matching data, will return
* the size of array
*/
private static int getDailyRecIndex(ArrayList<Map> dailyData, String findDate, int start, int expectedDiff) {
String date;
if (start + expectedDiff < dailyData.size()) {
date = getValueOr(dailyData.get(start + expectedDiff), "w_date", "");
if (isSameDate(date, findDate, "-")) {
return start + expectedDiff;
} else {
expectedDiff++;
date = getValueOr(dailyData.get(start + expectedDiff), "w_date", "");
if (isSameDate(date, findDate, "-")) {
return start + expectedDiff;
}
}
}
for (int j = start; j < dailyData.size(); j++) {
date = getValueOr(dailyData.get(j), "w_date", "");
if (isSameDate(date, findDate, "-")) {
return j;
}
}
return dailyData.size();
}
private static int getStartYearIndex(ArrayList<Map> dailyData, int startYear) {
// If no starting year is provided, the multiple years will begin on the first available weather year.
int startYearIndex;
if (startYear == -99) {
LOG.warn("SC_YEAR is not valid in the data set, will using first year of weather data as start year");
startYearIndex = 0;
} else {
startYearIndex = dailyData.size();
for (int i = 0; i < dailyData.size(); i++) {
String w_date = getValueOr(dailyData.get(i), "w_date", "");
if (w_date.equals(startYear + "0101")) {
startYearIndex = i;
break;
} else if (w_date.endsWith("0101")) {
i += 364;
}
}
}
return startYearIndex;
}
/**
* Often the total amount of fertilizer in a growing season has been
* recorded, but no details of application dates, types of fertilizer, etc.
* This function allows a user to specify rules for fertilizer application
* in a region. As a result, "N" fertilizer events are added to the JSON
* object.
*
* @param num Number of fertilizer applications
* @param fecd The code for type of fertilizer added
* @param feacd The code for fertilizer application method
* @param fedep The depth at which fertilizer is applied (cm)
* @param offsets The array of date as offset from planting date (days)
* (must be paired with ptps)
* @param ptps The array of proportion of total N added (%) (must be paired
* with offsets, the sum must be 100%)
* @param data The experiment data holder
*
* @return An {@code ArrayList} of generated {@code fertilizer event} based
* on each planting date
*/
public static ArrayList<HashMap<String, String>> getFertDistribution(HashMap data, String num, String fecd, String feacd, String fedep, String[] offsets, String[] ptps) {
int iNum;
//Map expData;
ArrayList<Map> eventData;
String fen_tot;
String[] fdates;
//Event events;
String pdate;
ArrayList<HashMap<String, String>> results = new ArrayList<HashMap<String, String>>();
try {
iNum = Integer.parseInt(num);
if (iNum < 1) {
LOG.error("INPUT NUMBER OF FERTILIZER APPLICATIONS MUST BE A POSIIVE NUMBER");
return results;
}
} catch (Exception e) {
LOG.error("INPUT NUMBER OF FERTILIZER APPLICATIONS IS NOT A NUMBERIC STRING [" + num + "]");
return results;
}
// Check if the two input array have "num" pairs of these data
if (iNum != offsets.length || iNum != ptps.length) {
if (iNum > offsets.length || !compare("100", round(sum(Arrays.copyOfRange(ptps, 0, iNum)), 0), CompareMode.EQUAL)) {
LOG.error("THE REQUESTED NUMBER OF APPLICATION IS NOT MATCH WITH THE GIVEN OFFSET DATA");
return results;
}
} // Check if the sum of PTPs is 100%
else if (!compare("100", round(sum(ptps), 0), CompareMode.EQUAL)) {
LOG.error("THE SUM OF PROPORTION OF TOTAL N ADDED (%) IS NOT EQUAL TO 100%");
return results;
}
// Check if experiment data is available
//ArrayList<Map> exps = getObjectOr(data, "experiments", new ArrayList());
//if (exps.isEmpty()) {
// LOG.error("NO EXPERIMENT DATA.");
// return;
//} else {
// expData = exps.get(0);
// if (expData.isEmpty()) {
// LOG.error("NO EXPERIMENT DATA.");
// return;
// } else {
Map mgnData = getObjectOr(data, "management", new HashMap());
eventData = getObjectOr(mgnData, "events", new ArrayList());
// Check whether FEN_TOT is available
try {
fen_tot = getValueOr(data, "fen_tot", "");
} catch (Exception e) {
LOG.error("FEN_TOT IS INVALID");
return results;
}
// Check whether the planting date is available
// events = new Event(eventData, "planting");
// if (events.isEventExist()) {
// pdate = getValueOr(events.getCurrentEvent(), "date", "");
// if (convertFromAgmipDateString(pdate) == null) {
// LOG.error("PLANTING DATE IS MISSING");
// return;
// } else {
// LOG.error("PLANTING EVENT IS MISSING");
// return;
//HashMap<String, Object> dest = new HashMap<String, Object>();
ArrayList<String> output = new ArrayList<String>();
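// Each generated application is encoded as "<fedate>|<feamn>"; the pair is split on '|'
// below when the fertilizer event fields are written into the result map.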
for (Map events : eventData) {
if (getValueOr(events, "event", "").equals("planting")) {
pdate = getValueOr(events, "date", "");
// Check input days and ptps
try {
fdates = new String[iNum];
for (int i = 0; i < iNum; i++) {
fdates[i] = dateOffset(pdate, offsets[i]);
if (fdates[i] == null) {
LOG.error("INVALID OFFSET NUMBER OF DAYS [" + offsets[i] + "]");
return results;
}
}
} catch (Exception e) {
LOG.error("PAIR DATA IS IN VALID [" + e.getMessage() + "]");
return results;
}
//events.setEventType("fertilizer");
for (int i = 0; i < iNum; i++) {
String feamn = round(product(fen_tot, ptps[i], "0.01"), 0);
output.add(String.format("%s|%s", fdates[i], feamn));
}
}
}
HashMap result = new HashMap();
for (String addNew : output) {
String[] tmp = addNew.split("[|]");
AcePathfinderUtil.insertValue(result, "fedate", tmp[0]);
AcePathfinderUtil.insertValue(result, "fecd", fecd);
AcePathfinderUtil.insertValue(result, "feacd", feacd);
AcePathfinderUtil.insertValue(result, "fedep", fedep);
AcePathfinderUtil.insertValue(result, "feamn", tmp[1]);
}
results = MapUtil.getBucket(result, "management").getDataList();
return results;
}
/**
* Organic matter applications include manure, crop residues, etc. As a
* result, the organic matter application event is updated with missing
* data.
*
* @param expData The experiment data holder
* @param offset application date as days before (-) or after (+) planting
* date (days)
* @param omcd code for type of fertilizer added
* @param omc2n C:N ratio for applied organic matter
* @param omdep depth at which organic matter is incorporated (cm)
* @param ominp percentage incorporation of organic matter (%)
* @param dmr
*
* @return An updated {@code ArrayList} of {@code event}, the generated or
* updated {@code Organic matter event} will be included and sorted inside
* the {@code ArrayList}
*/
public static ArrayList<HashMap<String, String>> getOMDistribution(HashMap expData, String offset, String omcd, String omc2n, String omdep, String ominp, String dmr) {
String omamt;
ArrayList<HashMap<String, String>> eventData;
Event events;
String pdate;
String odate;
// Check if experiment data is available
// ArrayList<Map> exps = getObjectOr(data, "experiments", new ArrayList());
// if (exps.isEmpty()) {
// LOG.error("NO EXPERIMENT DATA.");
// return;
// } else {
// Map expData = exps.get(0);
// if (expData.isEmpty()) {
// LOG.error("NO EXPERIMENT DATA.");
// return;
// } else {
// Map mgnData = getObjectOr(expData, "management", new HashMap());
// eventData = getObjectOr(mgnData, "events", new ArrayList());
eventData = new ArrayList();
ArrayList<HashMap<String, String>> originalEvents = MapUtil.getBucket(expData, "management").getDataList();
for (int i = 0; i < originalEvents.size(); i++) {
HashMap tmp = new HashMap();
tmp.putAll(originalEvents.get(i));
eventData.add(tmp);
}
// Get the omamt from the first? OM event
Event omEvent = new Event(eventData, "organic_matter");
omamt = (String) omEvent.getCurrentEvent().get("om_tot");
if (omamt == null || omamt.equals("")) {
LOG.debug("OM_TOT IS NOT AVAILABLE, USING OMAMT");
omamt = (String) omEvent.getCurrentEvent().get("omamt");
}
if (omamt == null || omamt.equals("")) {
LOG.error("NEITHER OM_TOT NOR OMAMT ARE AVAILABLE");
return eventData;
}
//omamt = getValueOr(expData, "omamt", ""); // TODO will be replace by generic getting method
// Get planting date and om_date
events = new Event(eventData, "planting");
pdate = (String) events.getCurrentEvent().get("date");
if (pdate == null || pdate.equals("")) {
LOG.error("PLANTING DATE IS NOT AVAILABLE");
return eventData;
}
odate = dateOffset(pdate, offset);
if (odate == null) {
LOG.error("INVALID OFFSET NUMBER OF DAYS [" + offset + "]");
return eventData;
}
String omnpct = divide(divide("100.0", dmr, 3), omc2n, 2);
if (omnpct == null) {
LOG.error("INVALID VALUES FOR DMR and OMC2N");
return eventData;
}
// Update organic material event
events.setEventType("organic_matter");
if (events.isEventExist()) {
events.updateEvent("date", odate, false);
events.updateEvent("omcd", omcd, false);
events.updateEvent("omamt", omamt, false);
events.updateEvent("omc2n", omc2n, false);
events.updateEvent("omdep", omdep, false);
events.updateEvent("ominp", ominp, false);
events.updateEvent("omn%", omnpct, true);
}
return eventData;
}
/**
* Calculate Stable C (g[C]/100g[soil]) fraction distribution in soil layers
* and save the result into initial condition layers
*
* @param data The experiment data holder
* @param som3_0 fraction of total soil organic C which is stable, at
* surface (fraction)
* @param pp depth of topsoil where maximum SOM3 fraction is relatively
* constant (cm)
* @param rd depth at which soil C is relatively stable (~98% stable C) (cm)
*
* @return An {@code ArrayList} of calculated {@code SLSC} for each layer of
* given soil
*/
public static HashMap<String, ArrayList<String>> getStableCDistribution(HashMap data, String som3_0, String pp, String rd) {
HashMap<String, ArrayList<String>> results = new HashMap<String, ArrayList<String>>();
ArrayList<String> slscArr = new ArrayList();
ArrayList<HashMap> soilLayers;
String k;
String som2_0;
String f;
String som3_fac;
String[] sllbs;
String[] slocs;
// double mid;
String mid;
int finalScale = 2;
LOG.debug("Checkpoint 1");
try {
k = divide(log("0.02") + "", substract(rd, pp), finalScale + 1);
som2_0 = multiply("0.95", substract("1", som3_0));
} catch (Exception e) {
LOG.error("INVALID INPUT FOR NUMBERIC VALUE");
return results;
}
soilLayers = getSoilLayer(data);
if (soilLayers == null) {
return results;
} else if (soilLayers.isEmpty()) {
LOG.error("SOIL LAYER DATA IS EMPTY");
return results;
} else {
try {
sllbs = new String[soilLayers.size()];
slocs = new String[soilLayers.size()];
for (int i = 0; i < soilLayers.size(); i++) {
sllbs[i] = getObjectOr(soilLayers.get(i), "sllb", "");
slocs[i] = getObjectOr(soilLayers.get(i), "sloc", "");
}
} catch (NumberFormatException e) {
LOG.error("INVALID NUMBER FOR SLOC OR SLLB IN DATA [" + e.getMessage() + "]");
return results;
}
}
LOG.debug("Checkpoint 2");
// Check if initial condition layer data is available
// ArrayList<Map> exps = getObjectOr(data, "experiments", new ArrayList());
// if (exps.isEmpty()) {
// LOG.error("NO EXPERIMENT DATA.");
// return;
// } else {
// Map expData = exps.get(0);
// if (expData.isEmpty()) {
// LOG.error("NO EXPERIMENT DATA.");
// return;
// } else {
// Map icData = getObjectOr(data, "soil", new HashMap());
// icLayers = getObjectOr(icData, "soilLayer", new ArrayList());
// if (icLayers.isEmpty()) {
// LOG.error("NO SOIL DATA.");
// return;
// } else if (icLayers.size() != soilLayers.size()) {
// LOG.error("THE LAYER DATA IN THE INITIAL CONDITION SECTION IS NOT MATCHED WITH SOIL SECTION");
// return;
LOG.debug("Checkpoint 3");
String last = "0";
for (int i = 0; i < soilLayers.size(); i++) {
mid = average(sllbs[i], last);
last = sllbs[i];
f = getGrowthFactor(mid, pp, k, som2_0);
som3_fac = substract("1", divide(max("0.02", f), "0.95", finalScale + 1));
slscArr.add(round(multiply(slocs[i], som3_fac), finalScale));
// LOG.debug((String)icLayers.get(i).get("icbl") + ", " + (String)icLayers.get(i).get("slsc"));
}
results.put("slsc", slscArr);
return results;
}
/**
* This function uses the first event of each type to generate the other
* events of that type for each following year in the experiment duration.
* The month and day are kept the same as in the original event. If the
* experiment duration is not longer than 1 year, an empty result set is
* returned.
*
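* <p>A minimal usage sketch (the {@code data} map is assumed to hold an
* experiment with {@code exp_dur = "3"} and a management event list):</p>
* <pre>{@code
* ArrayList<ArrayList<HashMap<String, String>>> byYear = getAutoEventDate(data);
* // byYear.get(0) holds the original events; byYear.get(1) and byYear.get(2)
* // hold copies whose dates are shifted by one and two years respectively.
* }</pre>
*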
* @param data The HashMap of experiment (including weather data)
*
* @return Several groups of {@code ArrayList} of {@code Event} for each
* year in the experiment duration. The original group of {@code Event} will
* only be included when {@code duration} > 1.
*/
public static ArrayList<ArrayList<HashMap<String, String>>> getAutoEventDate(Map data) {
ArrayList<ArrayList<HashMap<String, String>>> results = new ArrayList<ArrayList<HashMap<String, String>>>();
// Get Experiment duration
int expDur;
try {
expDur = Integer.parseInt(getValueOr(data, "exp_dur", "1"));
} catch (Exception e) {
expDur = 1;
}
// If no more planting event is required
if (expDur <= 1) {
LOG.info("Experiment duration is not more than 1, AUTO_REPLICATE_EVENTS won't be applied.");
return results;
}
// Get Event date
ArrayList<HashMap<String, String>> events = MapUtil.getBucket(data, "management").getDataList();
while (results.size() < expDur) {
results.add(new ArrayList());
}
Calendar cal = Calendar.getInstance();
for (int i = 0; i < events.size(); i++) {
HashMap<String, String> event = events.get(i);
// if (convertFromAgmipDateString(date) == null) {
// String eventType = getValueOr(event, "event", "unknown");
// LOG.info("Only copy this {} event for each year without calculating date", eventType);
// for (int j = 1; j < expDur; j++) {
// results.get(j).add(event);
// continue;
// cal.setTime(dDate);
// int year = cal.get(Calendar.YEAR);
// String monthAndDay = String.format("%1$02d%2$02d",
// cal.get(Calendar.MONTH) + 1,
// cal.get(Calendar.DATE));
String date = getValueOr(event, "date", "");
String edate = getValueOr(event, "edate", "");
for (int j = 0; j < expDur; j++) {
HashMap<String, String> newEvent = new HashMap();
newEvent.putAll(event);
if (!date.equals("")) {
newEvent.put("date", yearOffset(date, j + ""));
} else {
String eventType = getValueOr(event, "event", "unknown");
LOG.error("Original {} event has an invalid date: [{}].", eventType, date);
}
if (!edate.equals("")) {
newEvent.put("edate", yearOffset(edate, j + ""));
}
results.get(j).add(newEvent);
}
}
return results;
}
}
|
package org.anc.lapps.gate;
import gate.*;
import gate.creole.AbstractLanguageAnalyser;
import gate.creole.ResourceInstantiationException;
import org.lappsgrid.api.Data;
import org.lappsgrid.api.InternalException;
import org.lappsgrid.api.WebService;
import org.lappsgrid.core.DataFactory;
import org.lappsgrid.discriminator.DiscriminatorRegistry;
import org.lappsgrid.discriminator.Types;
import org.lappsgrid.vocabulary.Metadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.io.StringWriter;
/**
* @author Keith Suderman
*/
public abstract class SimpleGateService implements WebService
{
public static final Logger logger = LoggerFactory.getLogger(SimpleGateService.class);
public static final Configuration K = new Configuration();
protected AbstractLanguageAnalyser resource;
protected Exception savedException;
protected String name;
// public static Boolean initialized = false;
public SimpleGateService()
{
synchronized (State.initialized) {
if (!State.initialized)
{
State.initialized = true; // We only try this once.
try
{
logger.info("Configuring Gate.");
File gateHome = new File(K.GATE_HOME);
if (!gateHome.exists())
{
logger.error("Gate home not found: {}", gateHome.getPath());
savedException = new FileNotFoundException(K.GATE_HOME);
return;
}
logger.info("Gate home: {}", K.GATE_HOME);
File plugins = new File(K.PLUGINS_HOME);
if (!plugins.exists())
{
logger.error("Gate plugins not found: {}", plugins.getPath());
savedException = new FileNotFoundException(K.PLUGINS_HOME);
return;
}
logger.info("Plugins home: {}", K.PLUGINS_HOME);
File siteConfig = new File(K.SITE_CONFIG);
if (!siteConfig.exists())
{
logger.error("Site config not found: {}", siteConfig.getPath());
savedException = new FileNotFoundException(K.SITE_CONFIG);
return;
}
logger.info("Site config: {}", K.SITE_CONFIG);
File userConfig = new File(K.USER_CONFIG);
if (!userConfig.exists())
{
logger.error("User config not found: {}", userConfig.getPath());
savedException = new FileNotFoundException(K.USER_CONFIG);
return;
}
logger.info("User config: {}", K.USER_CONFIG);
Gate.setGateHome(gateHome);
Gate.setSiteConfigFile(siteConfig);
Gate.setPluginsHome(plugins);
Gate.setUserConfigFile(userConfig);
try
{
logger.info("Initializing GATE");
Gate.init();
}
catch (Exception e)
{
logger.error("Error initializing GATE.", e);
savedException = e;
return;
}
File[] files = plugins.listFiles();
for (File directory : files)
{
if (directory.isDirectory())
{
logger.info("Registering plugin: {}", directory.getPath());
Gate.getCreoleRegister().registerDirectories(directory.toURI().toURL());
}
}
}
catch (Exception e)
{
logger.error("Unable to configure GATE.", e);
logger.warn(e.getMessage());
}
}
}
}
protected void createResource(String gateResourceName)
{
this.createResource(gateResourceName, Factory.newFeatureMap());
}
protected void createResource(String gateResourceName, FeatureMap map)
{
this.name = gateResourceName;
if (savedException != null)
{
// Don't stomp on the saved exception.
return;
}
try
{
logger.info("Creating resource {}", gateResourceName);
resource = (AbstractLanguageAnalyser) Factory.createResource(gateResourceName, map);
logger.info("Resource created.");
}
catch (Exception e)
{
logger.error("Unable to create Gate resource.", e);
savedException = e;
}
}
public String getServiceId()
{
return this.getClass().getCanonicalName() + ":" + Version.getVersion();
}
@Override
public Data configure(Data config)
{
return DataFactory.error("Unsupported operation.");
}
@Override
public Data execute(Data input)
{
logger.debug("Executing {}", name);
if (savedException != null)
{
logger.warn("Returning saved exception: " + savedException.getMessage());
return new Data(Types.ERROR, getStackTrace(savedException));
}
Document doc = null;
try
{
doc = getDocument(input);
}
catch (InternalException e)
{
logger.error("Internal exception.", e);
return new Data(Types.ERROR, getStackTrace(e));
}
Data result = null;
// AbstractLanguageAnalyser resource = null;
try
{
// resource = pool.take();
logger.info("Executing resource {}", name);
resource.setDocument(doc);
resource.execute();
FeatureMap features = doc.getFeatures();
Object value = features.get(Metadata.PRODUCED_BY);
String producedBy = name + ":" + Version.getVersion();
if (value != null) {
producedBy = value.toString() + ", " + producedBy;
}
doc.getFeatures().put(Metadata.PRODUCED_BY, producedBy);
String xml = doc.toXml();
resource.setDocument(null);
// the GATE document is released once, in the finally block below
result = new Data(Types.GATE, xml);
}
catch (Exception e)
{
logger.error("Error running GATE resource {}", name, e);
return new Data(Types.ERROR, getStackTrace(e));
}
finally
{
Factory.deleteResource(doc);
}
logger.info("Execution complete.");
return result;
}
Document getDocument(Data input) throws InternalException
{
Document doc = null;
try
{
long type = input.getDiscriminator();
if (type == Types.TEXT)
{
logger.info("Creating document from text.");
doc = Factory.newDocument(input.getPayload());
}
else if (type == Types.GATE)
{
logger.info("Creating document from GATE document.");
doc = (Document)
Factory.createResource("gate.corpora.DocumentImpl",
Utils.featureMap(gate.Document.DOCUMENT_STRING_CONTENT_PARAMETER_NAME,
input.getPayload(),
gate.Document.DOCUMENT_MIME_TYPE_PARAMETER_NAME, "text/xml"));
}
else
{
String name = DiscriminatorRegistry.get(type);
throw new InternalException("Unknown document type : " + name);
}
}
catch (ResourceInstantiationException ex)
{
throw new InternalException("Unable to parse Gate document", ex);
}
return doc;
}
private String getStackTrace(Throwable t)
{
StringWriter stringWriter = new StringWriter();
PrintWriter printWriter = new PrintWriter(stringWriter);
t.printStackTrace(printWriter);
return stringWriter.toString();
}
}
|
package org.biojava.bio.program.das;
import java.util.*;
import java.net.*;
import java.io.*;
import org.biojava.bio.*;
import org.biojava.utils.*;
import org.biojava.utils.cache.*;
import org.biojava.bio.seq.*;
import org.biojava.bio.seq.io.*;
import org.biojava.bio.symbol.*;
import org.biojava.bio.program.xff.*;
import org.apache.xerces.parsers.*;
import org.xml.sax.*;
import org.xml.sax.helpers.*;
import org.w3c.dom.*;
/**
* FeatureHolder reflecting features provided by a DAS annotation
* server.
*
* @since 1.1
* @author Thomas Down
* @author Matthew Pocock
*/
class DASFeatureSet implements FeatureHolder {
private FeatureRequestManager.Ticket featureTicket;
private CacheReference realFeatures;
private DASSequence refSequence;
private URL dataSource;
private String sourceID;
private String dataSourceString;
DASFeatureSet(DASSequence seq, URL ds, String id)
throws BioException
{
refSequence = seq;
dataSource = ds;
sourceID = id;
dataSourceString = dataSource.toString();
}
void registerFeatureFetcher() {
if (realFeatures != null && realFeatures.get() == null) {
realFeatures = null;
featureTicket = null;
}
if (featureTicket == null) {
SeqIOListener listener = new DASFeatureSetPopulator();
FeatureRequestManager frm = refSequence.getParentDB().getFeatureRequestManager();
featureTicket = frm.requestFeatures(dataSource, sourceID, listener);
}
}
protected FeatureHolder getFeatures() {
if (realFeatures != null) {
FeatureHolder fh = (FeatureHolder) realFeatures.get();
if (fh != null) {
return fh;
}
}
try {
registerFeatureFetcher();
featureTicket.doFetch();
} catch (ParseException ex) {
throw new BioError(ex, "Error parsing feature table");
} catch (BioException ex) {
throw new BioError(ex);
}
if (realFeatures == null) {
throw new BioError("Assertion failure: features didn't get fetched.");
}
FeatureHolder fh = (FeatureHolder) realFeatures.get();
if (fh == null) {
throw new BioError("Assertion failure: cache is stupidly small...");
}
return fh;
}
public Iterator features() {
return getFeatures().features();
}
public boolean containsFeature(Feature f) {
return getFeatures().containsFeature(f);
}
public FeatureHolder filter(FeatureFilter ff, boolean recurse) {
if (FilterUtils.areDisjoint(ff,
new FeatureFilter.ByAnnotation(DASSequence.PROPERTY_ANNOTATIONSERVER,
dataSourceString)
))
{
return FeatureHolder.EMPTY_FEATURE_HOLDER;
}
return getFeatures().filter(ff, recurse);
}
public int countFeatures() {
return getFeatures().countFeatures();
}
public Feature createFeature(Feature.Template temp)
throws ChangeVetoException
{
throw new ChangeVetoException("Can't create features on DAS sequences.");
}
public void removeFeature(Feature f)
throws ChangeVetoException
{
throw new ChangeVetoException("Can't remove features from DAS sequences.");
}
// Changeable stuff (which we're not, fortunately)
public void addChangeListener(ChangeListener cl) {}
public void addChangeListener(ChangeListener cl, ChangeType ct) {}
public void removeChangeListener(ChangeListener cl) {}
public void removeChangeListener(ChangeListener cl, ChangeType ct) {}
// Listener which is responsible for populating this FeatureSet
private class DASFeatureSetPopulator extends SeqIOAdapter {
private SimpleFeatureHolder holder;
private List featureStack = new ArrayList();
private Feature stackTop = null;
public void startSequence() {
holder = new SimpleFeatureHolder();
}
public void endSequence() {
realFeatures = refSequence.getParentDB().getFeaturesCache().makeReference(holder);
}
public void startFeature(Feature.Template temp)
throws ParseException
{
if (temp instanceof ComponentFeature.Template) {
// I'm not convinced there's an easy, safe, way to say we don't
// want these server side, so we'll elide them here instead.
// We push a null onto the stack so that we don't get confused
// over endFeature().
featureStack.add(null);
} else {
try {
Feature f = null;
if (temp.annotation == Annotation.EMPTY_ANNOTATION) {
temp.annotation = new SmallAnnotation();
} else {
if (temp.annotation.containsProperty(XFFFeatureSetHandler.PROPERTY_XFF_ID)) {
temp.annotation.setProperty(DASSequence.PROPERTY_FEATUREID,
temp.annotation.getProperty(XFFFeatureSetHandler.PROPERTY_XFF_ID));
}
}
temp.annotation.setProperty(DASSequence.PROPERTY_ANNOTATIONSERVER, dataSource);
if (stackTop == null) {
f = ((RealizingFeatureHolder) refSequence).realizeFeature(refSequence, temp);
holder.addFeature(f);
} else {
f = stackTop.createFeature(temp);
}
featureStack.add(f);
stackTop = f;
} catch (Exception ex) {
ex.printStackTrace();
throw new ParseException(ex, "Couldn't realize feature in DAS");
}
}
}
public void addFeatureProperty(Object key, Object value)
throws ParseException
{
if (stackTop == null) {
// Feature we're skipping
return;
}
try {
if (key.equals(XFFFeatureSetHandler.PROPERTY_XFF_ID)) {
stackTop.getAnnotation().setProperty(DASSequence.PROPERTY_FEATUREID, value);
} else {
stackTop.getAnnotation().setProperty(key, value);
}
} catch (ChangeVetoException ex) {
throw new ParseException(ex, "Couldn't set feature property");
} catch (NullPointerException ex) {
ex.printStackTrace();
}
}
public void endFeature()
throws ParseException
{
if (featureStack.size() < 1) {
throw new BioError("Missmatched endFeature()");
} else {
featureStack.remove(featureStack.size() - 1);
int pos = featureStack.size() - 1;
stackTop = null;
while (stackTop == null && pos >= 0) {
stackTop = (Feature) featureStack.get(pos--);
}
}
}
}
}
|
package org.biojava.bio.structure.io;
import org.biojava.bio.structure.* ;
import java.io.*;
import java.net.Socket ;
/** Reads a PDB file from a local SRS installation using getz. It is
* essentially the same as PDBFileReader, but instead of reading from a file
* stream it reads from a buffered socket stream.
*
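* <p>A minimal usage sketch (host and port are placeholders for the local
* pfetch/SRS server; both system properties must be set before the call):</p>
* <pre>
*   System.setProperty("PFETCH_host", "localhost");
*   System.setProperty("PFETCH_port", "7777");
*   PDBSRSReader reader = new PDBSRSReader();
*   Structure structure = reader.getStructureById("5pti");
* </pre>
*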
* @author Andreas Prlic
*
*/
public class PDBSRSReader implements StructureIO {
private BufferedReader getBufferedReader(String pdbId)
throws IOException
{
// getz -view PDBdata '[pdb:5pti]
Socket client = null;
DataInputStream input = null;
PrintStream output = null;
String machine = "" ;
int port = 0 ;
String message = "please set System properties PFETCH_machine and PFETCH_port !" ;
try {
// should be sent as an argument ...
machine = System.getProperty("PFETCH_host");
String p = System.getProperty("PFETCH_port");
port = Integer.parseInt(p);
} catch ( NullPointerException e) {
System.err.println(message);
e.printStackTrace();
throw new IOException(message) ;
} catch (IllegalArgumentException e) {
System.err.println(message);
e.printStackTrace();
throw new IOException(message) ;
}
if (port == 0 ) {
throw new IOException(message);
}
if ( machine == null || machine.equals("")) {
throw new IOException(message);
}
System.out.println("contacting: " + machine + " " + port);
//Process proc = Runtime.getRuntime().exec(GETZSTRING+argument);
client = new Socket(machine , port);
client.setSoTimeout(10000) ; // 10 seconds
System.out.println("socket o.k.");
input = new DataInputStream(client.getInputStream());
BufferedReader buf = new BufferedReader (new InputStreamReader (input));
System.out.println("sending: --pdb " + pdbId.toLowerCase());
output = new PrintStream(client.getOutputStream());
output.println("--pdb "+ pdbId.toLowerCase());
output.flush();
// check if return is O.K.
buf.mark(100);
String line = buf.readLine();
buf.reset();
if ( line.equals("no match")) {
System.out.println("first line: " + line );
throw new IOException("no pdb with code "+pdbId.toLowerCase() +" found");
}
return buf ;
}
/** load a structure from an SRS installation using wgetz
*/
public Structure getStructureById(String pdbId)
throws IOException
{
BufferedReader buf ;
//inStream = getInputStream();
buf = getBufferedReader(pdbId) ;
/*String line = buf.readLine ();
while (line != null) {
System.out.println (line);
line = buf.readLine ();
}
return null ;
*/
Structure s = null ;
try{
//System.out.println("Starting to parse PDB file " + getTimeStamp());
PDBFileParser pdbpars = new PDBFileParser();
s = pdbpars.parsePDBFile(buf) ;
//System.out.println("Done parsing PDB file " + getTimeStamp());
} catch(Exception ex){
ex.printStackTrace();
}
return s ;
}
}
|
package org.ensembl.healthcheck.gui;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.ensembl.healthcheck.DatabaseRegistry;
import org.ensembl.healthcheck.DatabaseRegistryEntry;
import org.ensembl.healthcheck.ReportLine;
import org.ensembl.healthcheck.ReportManager;
import org.ensembl.healthcheck.Reporter;
import org.ensembl.healthcheck.TestRegistry;
import org.ensembl.healthcheck.TestRunner;
import org.ensembl.healthcheck.testcase.EnsTestCase;
import org.ensembl.healthcheck.testcase.MultiDatabaseTestCase;
import org.ensembl.healthcheck.testcase.SingleDatabaseTestCase;
import org.ensembl.healthcheck.util.CallbackHandler;
import org.ensembl.healthcheck.util.LogFormatter;
import org.ensembl.healthcheck.util.MyStreamHandler;
import org.ensembl.healthcheck.util.Utils;
/**
* Graphical test runner.
*/
public class GuiTestRunner extends TestRunner implements Reporter {
/** The logger to use for this class */
protected static Logger logger = Logger.getLogger("HealthCheckLogger");
private boolean debug = false;
private GuiTestRunnerFrame gtrf;
/**
* Command-line entry point.
*
* @param args Command line arguments.
*/
public static void main(String[] args) {
new GuiTestRunner().run(args);
} // main
private void run(String[] args) {
outputLevel = ReportLine.ALL; // filtered later
ReportManager.setReporter(this);
parseCommandLine(args);
Utils.readPropertiesFileIntoSystem(PROPERTIES_FILE, false);
List<String> regexps = new ArrayList<String>();
regexps.add(".*");
DatabaseRegistry databaseRegistry = new DatabaseRegistry(regexps, null, null, false);
if (databaseRegistry.getEntryCount() == 0) {
logger.warning("Warning: no databases found!");
}
gtrf = new GuiTestRunnerFrame(this, new TestRegistry(), databaseRegistry);
gtrf.setVisible(true);
setupLogging();
}
private void setupLogging() {
logger.setUseParentHandlers(false); // stop parent logger getting the message
logger.addHandler(new CallbackHandler(gtrf, new LogFormatter()));
logger.addHandler(new MyStreamHandler(System.out, new LogFormatter()));
logger.setLevel(Level.WARNING); // default - only print important messages
if (debug) {
logger.setLevel(Level.FINEST);
}
} // setupLogging
private void parseCommandLine(String[] args) {
if (args.length > 0 && args[0].equals("-debug")) {
debug = true;
logger.finest("Running in debug mode");
}
} // parseCommandLine
/**
* Run all the tests in a list.
*
* @param ltests The tests to run.
* @param ldatabases The databases to run the tests on.
* @param lgtrf The test runner frame in which to display the results.
*/
protected void runAllTests(EnsTestCase[] ltests, DatabaseRegistryEntry[] ldatabases, GuiTestRunnerFrame lgtrf) {
// need to run the tests in a separate thread
final GuiTestRunnerFrame gtrf = lgtrf;
final EnsTestCase[] tests = ltests;
final DatabaseRegistryEntry[] databases = ldatabases;
Thread t = new Thread() {
public void run() {
gtrf.setTestProgressDialogVisibility(true);
int totalTestsToRun = tests.length * databases.length;
gtrf.setTotalToRun(totalTestsToRun);
// for each test, if it's a single database test we run it against each
// selected database in turn
// for multi-database tests, we create a new DatabaseRegistry containing
// the selected tests and use that
for (int i = 0; i < tests.length; i++) {
EnsTestCase testCase = tests[i];
if (testCase instanceof SingleDatabaseTestCase) {
for (int j = 0; j < databases.length; j++) {
DatabaseRegistryEntry dbre = databases[j];
String message = testCase.getShortTestName() + ": " + dbre.getName();
gtrf.updateProgressDialog(message);
((SingleDatabaseTestCase) testCase).run(dbre);
gtrf.incrementNumberRun(1);
gtrf.updateProgressDialog();
gtrf.repaintTestProgressDialog();
}
} else if (testCase instanceof MultiDatabaseTestCase) {
DatabaseRegistry dbr = new DatabaseRegistry(databases);
String message = testCase.getShortTestName() + " ( " + dbr.getEntryCount() + " databases)";
gtrf.updateProgressDialog(message);
((MultiDatabaseTestCase) testCase).run(dbr);
gtrf.incrementNumberRun(dbr.getEntryCount());
gtrf.updateProgressDialog();
}
// TODO - warn about not running OrderedDatabaseTestCase
}
gtrf.setTestProgressDialogVisibility(false);
gtrf.createResultFrame();
} // run
}; // thread
t.start();
} // runAllTests
// Implementation of Reporter interface
/**
* Called when a message is to be stored in the report manager.
*
* @param reportLine The message to store.
*/
public void message(ReportLine reportLine) {
}
/**
* Called just before a test case is run.
*
* @param testCase The test case about to be run.
* @param dbre The database which testCase is to be run on, or null of no/several databases.
*/
public void startTestCase(EnsTestCase testCase, DatabaseRegistryEntry dbre) {
}
/**
* Should be called just after a test case has been run.
*
* @param testCase The test case that was run.
* @param result The result of testCase.
* @param dbre The database which testCase was run on, or null of no/several databases.
*/
public void finishTestCase(EnsTestCase testCase, boolean result, DatabaseRegistryEntry dbre) {
}
} // GuiTestRunner
|
package org.exist.xquery.test;
import junit.framework.TestCase;
import junit.textui.TestRunner;
import org.exist.storage.DBBroker;
import org.exist.xmldb.DatabaseInstanceManager;
import org.exist.xquery.XPathException;
import org.xmldb.api.DatabaseManager;
import org.xmldb.api.base.Collection;
import org.xmldb.api.base.Database;
import org.xmldb.api.base.ResourceSet;
import org.xmldb.api.base.XMLDBException;
import org.xmldb.api.modules.CollectionManagementService;
import org.xmldb.api.modules.XPathQueryService;
import java.text.*;
import java.util.*;
/** Tests for various standard XQuery functions
* @author jens
*/
public class XQueryFunctionsTest extends TestCase {
private String[] testvalues;
private String[] resultvalues;
private XPathQueryService service;
private Collection root = null;
private Database database = null;
public static void main(String[] args) throws XPathException {
TestRunner.run(XQueryFunctionsTest.class);
}
/**
* Constructor for XQueryFunctionsTest.
* @param arg0
*/
public XQueryFunctionsTest(String arg0) {
super(arg0);
}
/** Tests the XQuery-/XPath-function fn:round-half-to-even
* with the rounding value typed xs:integer
*/
public void testRoundHtE_INTEGER() throws XPathException {
ResourceSet result = null;
String query = null;
String r = "";
try {
query = "fn:round-half-to-even( xs:integer('1'), 0 )";
result = service.query( query );
r = (String) result.getResource(0).getContent();
assertEquals( "1", r );
query = "fn:round-half-to-even( xs:integer('6'), -1 )";
result = service.query( query );
r = (String) result.getResource(0).getContent();
assertEquals( "10", r );
query = "fn:round-half-to-even( xs:integer('5'), -1 )";
result = service.query( query );
r = (String) result.getResource(0).getContent();
assertEquals( "0", r );
} catch (XMLDBException e) {
System.out.println("testRoundHtE_INTEGER(): "+e);
fail(e.getMessage());
}
}
/** Tests the XQuery-/XPath-function fn:round-half-to-even
* with the rounding value typed xs:double
*/
public void testRoundHtE_DOUBLE() throws XPathException {
/* List of Values to test with Rounding */
String[] testvalues =
{ "0.5", "1.5", "2.5", "3.567812E+3", "4.7564E-3", "35612.25" };
String[] resultvalues =
{ "0.0", "2.0", "2.0", "3567.81", "0.0", "35600.0" };
int[] precision =
{ 0, 0, 0, 2, 2, -2 };
ResourceSet result = null;
String query = null;
try {
XPathQueryService service = (XPathQueryService) root.getService( "XQueryService", "1.0" );
for (int i=0; i<testvalues.length; i++) {
query = "fn:round-half-to-even( xs:double('" + testvalues[i] + "'), " + precision[i] + " )";
result = service.query( query );
String r = (String) result.getResource(0).getContent();
assertEquals( resultvalues[i], r );
}
} catch (XMLDBException e) {
System.out.println("testRoundHtE_DOUBLE(): "+e);
fail(e.getMessage());
}
}
/** Tests the XQuery-XPath function fn:tokenize() */
public void testTokenize() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query( "count ( tokenize('a/b' , '/') )" );
r = (String) result.getResource(0).getContent();
assertEquals( "2", r );
result = service.query( "count ( tokenize('a/b/' , '/') )" );
r = (String) result.getResource(0).getContent();
assertEquals( "3", r );
result = service.query( "count ( tokenize('' , '/') )" );
r = (String) result.getResource(0).getContent();
assertEquals( "0", r );
result = service.query(
"let $res := fn:tokenize('abracadabra', '(ab)|(a)')" +
"let $reference := ('', 'r', 'c', 'd', 'r', '')" +
"return fn:deep-equal($res, $reference)" );
r = (String) result.getResource(0).getContent();
assertEquals( "true", r );
} catch (XMLDBException e) {
System.out.println("testTokenize(): " + e);
fail(e.getMessage());
}
}
public void testDeepEqual() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query(
"let $res := ('a', 'b')" +
"let $reference := ('a', 'b')" +
"return fn:deep-equal($res, $reference)" );
r = (String) result.getResource(0).getContent();
assertEquals( "true", r );
} catch (XMLDBException e) {
System.out.println("testTokenize(): " + e);
fail(e.getMessage());
}
}
public void testCompare() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query("fn:compare(\"Strasse\", \"Stra\u00DFe\")");
r = (String) result.getResource(0).getContent();
assertEquals( "-1", r );
//result = service.query("fn:compare(\"Strasse\", \"Stra\u00DFe\", \"java:GermanCollator\")");
//r = (String) result.getResource(0).getContent();
//assertEquals( "0", r );
} catch (XMLDBException e) {
System.out.println("testTokenize(): " + e);
fail(e.getMessage());
}
}
public void testDistinctValues() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query( "declare variable $c { distinct-values(('a', 'a')) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "a", r );
result = service.query( "declare variable $c { distinct-values((<a>a</a>, <b>a</b>)) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "a", r );
result = service.query( "let $seq := ('A', 2, 'B', 2) return distinct-values($seq) " );
assertEquals( 3, result.getSize() );
} catch (XMLDBException e) {
System.out.println("testTokenize(): " + e);
fail(e.getMessage());
}
}
public void testSum() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query( "declare variable $c { sum((1, 2)) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "3", r );
result = service.query( "declare variable $c { sum((<a>1</a>, <b>2</b>)) }; $c" );
r = (String) result.getResource(0).getContent();
//Any untyped atomic values in the sequence are converted to xs:double values ([MK Xpath 2.0], p. 432)
assertEquals( "3.0", r );
result = service.query( "declare variable $c { sum((), 3) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "3", r );
} catch (XMLDBException e) {
System.out.println("testTokenize(): " + e);
fail(e.getMessage());
}
}
public void testAvg() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query( "declare variable $c { avg((2, 2)) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "2", r );
result = service.query( "declare variable $c { avg((<a>2</a>, <b>2</b>)) }; $c" );
r = (String) result.getResource(0).getContent();
//Any untyped atomic values in the resulting sequence
//(typically, values extracted from nodes in a schemaless document)
//are converted to xs:double values ([MK Xpath 2.0], p. 301)
assertEquals( "2.0", r );
result = service.query( "declare variable $c { avg(()) }; $c" );
assertEquals( 0, result.getSize());
} catch (XMLDBException e) {
System.out.println("testTokenize(): " + e);
fail(e.getMessage());
}
}
public void testMin() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query( "declare variable $c { min((1, 2)) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "1", r );
result = service.query( "declare variable $c { min((<a>1</a>, <b>2</b>)) }; $c" );
r = (String) result.getResource(0).getContent();
//Any untyped atomic values in the resulting sequence
//(typically, values extracted from nodes in a schemaless document)
//are converted to xs:double values ([MK Xpath 2.0], p. 372)
assertEquals( "1.0", r );
result = service.query( "declare variable $c { min(()) }; $c" );
assertEquals( 0, result.getSize());
} catch (XMLDBException e) {
System.out.println("testMin(): " + e);
fail(e.getMessage());
}
}
public void testMax() throws XPathException {
ResourceSet result = null;
String r = "";
try {
result = service.query( "declare variable $c { max((1, 2)) }; $c" );
r = (String) result.getResource(0).getContent();
assertEquals( "2", r );
result = service.query( "declare variable $c { max((<a>1</a>, <b>2</b>)) }; $c" );
r = (String) result.getResource(0).getContent();
//Any untyped atomic values in the resulting sequence
//(typically, values extracted from nodes in a schemaless document)
//are converted to xs:double values ([MK Xpath 2.0], p. 370)
assertEquals( "2.0", r );
result = service.query( "declare variable $c { max(()) }; $c" );
assertEquals( 0, result.getSize());
} catch (XMLDBException e) {
System.out.println("testMax(): " + e);
fail(e.getMessage());
}
}
public void testExclusiveLock() throws XPathException {
ResourceSet result = null;
String r = "";
try {
String query = "let $query1 := (<a/>)\n" +
"let $query2 := (2, 3)\n" +
/*
* @see TestCase#setUp()
*/
protected void setUp() throws Exception {
// initialize driver
Class cl = Class.forName("org.exist.xmldb.DatabaseImpl");
database = (Database) cl.newInstance();
database.setProperty("create-database", "true");
DatabaseManager.registerDatabase(database);
root = DatabaseManager.getCollection("xmldb:exist://" + DBBroker.ROOT_COLLECTION, "admin", null);
service = (XPathQueryService) root.getService( "XQueryService", "1.0" );
}
/*
* @see TestCase#tearDown()
*/
protected void tearDown() throws Exception {
DatabaseManager.deregisterDatabase(database);
DatabaseInstanceManager dim =
(DatabaseInstanceManager) root.getService("DatabaseInstanceManager", "1.0");
dim.shutdown();
//System.out.println("tearDown PASSED");
}
}
|
package org.helioviewer.jhv.imagedata;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.concurrent.ForkJoinTask;
import java.util.concurrent.RecursiveTask;
import java.util.ArrayList;
import org.helioviewer.jhv.math.MathUtils;
public class ImageFilter {
private static final int SII_MIN_K = 3;
private static final int SII_MAX_K = 5;
private static final double sigma0 = 100.0 / Math.PI;
private static final short[][] radii0 = {
{76, 46, 23, 0, 0},
{82, 56, 37, 19, 0},
{85, 61, 44, 30, 16}};
private static final float[][] weights0 = {
{0.1618f, 0.5502f, 0.9495f, 0, 0},
{0.0976f, 0.3376f, 0.6700f, 0.9649f, 0},
{0.0739f, 0.2534f, 0.5031f, 0.7596f, 0.9738f}};
// Box weights
private final float[] weights = new float[SII_MAX_K];
// Box radii
private final int[] radii = new int[SII_MAX_K];
// Number of boxes
private final int K;
private final float[] buffer;
public ImageFilter(double sigma, int _K, int N) {
K = _K;
int i = K - SII_MIN_K;
double sum = 0;
for (int k = 0; k < K; ++k) {
radii[k] = (int) (radii0[i][k] * (sigma / sigma0) + 0.5);
sum += weights0[i][k] * (2 * radii[k] + 1);
}
for (int k = 0; k < K; ++k)
weights[k] = (float) (weights0[i][k] / sum);
int pad = radii[0] + 1;
buffer = new float[N + 2 * pad];
}
private static int extension(int N, int n) {
while (true) {
if (n < 0)
n = -1 - n; // Reflect over n = -1/2
else if (n >= N)
n = 2 * N - 1 - n; // Reflect over n = N - 1/2
else
break;
}
return n;
}
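// Worked example: extension(5, -1) == 0 and extension(5, 5) == 4, i.e. the
// index is reflected about the array ends (half-sample symmetric extension).
// gaussianConv() below approximates a 1-D Gaussian blur of length N (taken
// from src at the given offset/stride) by summing K box filters over a
// cumulative sum of the extended signal, writing the result into dst.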
private void gaussianConv(float[] dst, float[] src, int N, int stride, int offset) {
int pad = radii[0] + 1;
float accum = 0;
// Compute cumulative sum of src over n = -pad,..., N + pad - 1
for (int n = -pad; n < N + pad; ++n) {
accum += src[offset + stride * extension(N, n)];
buffer[pad + n] = accum;
}
// Compute stacked box filters
for (int n = 0; n < N; ++n) {
accum = weights[0] * (buffer[pad + n + radii[0]] - buffer[pad + n - radii[0] - 1]);
for (int k = 1; k < K; ++k)
accum += weights[k] * (buffer[pad + n + radii[k]] - buffer[pad + n - radii[k] - 1]);
dst[offset + stride * n] = accum;
}
}
private void gaussianConvImage(float[] dst, float[] src, int width, int height) {
// Filter each row
for (int y = 0; y < height; ++y)
gaussianConv(dst, src, width, 1, width * y);
// Filter each column
for (int x = 0; x < width; ++x)
gaussianConv(dst, dst, height, width, x);
}
private static final int _K = 3;
private static final float H = 0.7f;
private static final double KA = 0.7;
private static final double[] sigmas = {1, 4, 16, 64};
private static float Atan(double x) {
x = MathUtils.clip(x, -1, 1);
return (float) (x * ((Math.PI / 4 + 0.186982) - 0.191942 * x * x));
}
@SuppressWarnings("serial")
private static class ScaleTask extends RecursiveTask<float[]> {
private final float[] data;
private final int width;
private final int height;
private final int size;
private final int N;
private final double sigma;
ScaleTask(float[] _data, int _width, int _height, double _sigma) {
data = _data;
width = _width;
height = _height;
size = width * height;
N = Math.max(width, height);
sigma = _sigma;
}
@Override
protected float[] compute() {
ImageFilter filter = new ImageFilter(sigma, _K, N);
float[] conv = new float[size];
float[] conv2 = new float[size];
filter.gaussianConvImage(conv, data, width, height);
for (int i = 0; i < size; ++i) {
float v = data[i] - conv[i];
conv[i] = v;
conv2[i] = v * v;
}
filter.gaussianConvImage(conv2, conv2, width, height);
for (int i = 0; i < size; ++i) {
double v = Math.sqrt(conv2[i]);
if (v == 0)
v = 1;
conv[i] = Atan(KA * conv[i] / v);
}
return conv;
}
}
private static float[] multiScale(float[] data, int width, int height) {
ArrayList<ForkJoinTask<float[]>> tasks = new ArrayList<>(sigmas.length);
for (double sigma : sigmas)
tasks.add(new ScaleTask(data, width, height, sigma).fork());
int size = width * height;
float[] image = new float[size];
for (ForkJoinTask<float[]> task : tasks) {
float[] res = task.join();
for (int i = 0; i < size; ++i)
image[i] += res[i];
}
float min = 1e6f, max = -1e6f;
for (int i = 0; i < size; ++i) {
float v = image[i] / sigmas.length;
if (v > max)
max = v;
if (v < min)
min = v;
image[i] = v;
}
if (max == min)
return data;
float k = (1 - H) / (max - min);
for (int i = 0; i < size; ++i) {
image[i] = k * (image[i] - min) + H * data[i];
}
return image;
}
public static ByteBuffer mgn(ByteBuffer buf, int width, int height) {
int size = width * height;
float[] data = new float[size];
byte[] array = buf.array(); // always backed by array
for (int i = 0; i < size; ++i)
data[i] = ((array[i] + 256) & 0xFF) / 255f;
float[] image = multiScale(data, width, height);
ByteBuffer ret = ByteBuffer.allocate(size);
for (int i = 0; i < size; ++i)
ret.put((byte) MathUtils.clip(image[i] * 255 + .5f, 0, 255));
return ret.rewind();
}
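// Usage sketch (hypothetical caller): apply MGN to an 8-bit grayscale frame.
// The buffer must be array-backed, since mgn() reads it via buf.array():
//   ByteBuffer pixels = ByteBuffer.wrap(grayBytes); // width * height bytes
//   ByteBuffer enhanced = ImageFilter.mgn(pixels, width, height);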
public static ShortBuffer mgn(ShortBuffer buf, int width, int height) {
int size = width * height;
float[] data = new float[size];
short[] array = buf.array(); // always backed by array
for (int i = 0; i < size; ++i)
data[i] = ((array[i] + 65536) & 0xFFFF) / 65535f;
float[] image = multiScale(data, width, height);
ShortBuffer ret = ShortBuffer.allocate(size);
for (int i = 0; i < size; ++i) {
ret.put((short) MathUtils.clip(image[i] * 65535 + .5f, 0, 65535));
}
return ret.rewind();
}
}
|
package org.arachb.owlbuilder.lib;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger;
import org.arachb.arachadmin.IndividualBean;
import org.arachb.arachadmin.NarrativeBean;
import org.arachb.arachadmin.PublicationBean;
import org.arachb.arachadmin.TermBean;
import org.arachb.owlbuilder.Owlbuilder;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAnnotation;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLClass;
import org.semanticweb.owlapi.model.OWLClassAssertionAxiom;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLIndividual;
import org.semanticweb.owlapi.model.OWLObject;
import org.semanticweb.owlapi.model.OWLOntologyManager;
public class Individual implements NamedGeneratingEntity{
private static Logger log = Logger.getLogger(Individual.class);
private final IndividualBean bean;
public Individual(IndividualBean ib){
bean = ib;
}
private final Map<String, OWLObject> localElements = new HashMap<>();
@Override
public OWLObject generateOWL(Owlbuilder b) throws Exception{
OWLObject result = generateOWL(b,localElements);
localElements.clear();
return result;
}
@Override
public OWLObject generateOWL(Owlbuilder builder, Map<String, OWLObject> elements) throws Exception {
final OWLDataFactory factory = builder.getDataFactory();
final OWLOntologyManager manager = builder.getOntologyManager();
IRI individualIRI;
try {
String indString = bean.checkIRIString(builder.getIRIManager());
if (elements.containsKey(indString)){
return elements.get(indString);
}
individualIRI = IRI.create(indString);
OWLIndividual namedIndividual = factory.getOWLNamedIndividual(individualIRI);
builder.initializeMiscIndividual(namedIndividual);
final String label = getLabelFromNarrativeSet();
if (label != null){
log.info("Individual Bean " + indString + " has label " + label);
OWLAnnotation labelAnno = factory.getOWLAnnotation(factory.getRDFSLabel(),
factory.getOWLLiteral(label));
OWLAxiom ax = factory.getOWLAnnotationAssertionAxiom(individualIRI, labelAnno);
// Add the axiom to the ontology
manager.addAxiom(builder.getTarget(),ax);
}
final String iComment = "Individual from individual owlgeneration, id = " + bean.getId();
OWLAnnotation commentAnno = factory.getOWLAnnotation(factory.getRDFSComment(),
factory.getOWLLiteral(iComment));
OWLAxiom ax = factory.getOWLAnnotationAssertionAxiom(individualIRI, commentAnno);
manager.addAxiom(builder.getTarget(),ax);
ClassTerm ct;
if (TermBean.isCached(bean.getTerm())){
ct = new ClassTerm(TermBean.getCached(bean.getTerm()));
}
else {
throw new RuntimeException("Term " + bean.getTerm() + " for individual " + bean.getId() + " is not cached");
}
OWLClass cl = (OWLClass)ct.generateOWL(builder, elements);
OWLClassAssertionAxiom clAssertion = factory.getOWLClassAssertionAxiom(cl, namedIndividual);
builder.getOntologyManager().addAxiom(builder.getTarget(), clAssertion);
builder.initializeMiscIndividual(namedIndividual);
elements.put(indString, namedIndividual);
return namedIndividual;
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
return null;
}
}
private String getLabelFromNarrativeSet(){
switch (bean.getNaratives().size()){
case 0: return bean.getLabel();
case 1: Set<Integer>nSet = bean.getNaratives();
int narrativeId = nSet.iterator().next();
NarrativeBean narrative = NarrativeBean.getCached(narrativeId);
return labelFromNarrative(narrative);
default:
return bean.getLabel() + " in multiple narratives";
}
}
private String labelFromNarrative(NarrativeBean n){
log.info("publication cache size = " + PublicationBean.cacheSize());
PublicationBean p = PublicationBean.getCached(n.getPublicationId());
String pubStr = p.getAuthorList()+"("+p.getPublicationYear()+")";
return bean.getLabel() + " in " + n.getLabel() + " contained in " + pubStr;
}
/* methods to expose bean fields */
public String getIRIString(){
return bean.getIRIString();
}
}
|
package org.jgroups.stack;
import org.jgroups.Address;
import org.jgroups.util.*;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* This retransmitter is specialized in maintaining <em>ranges of seqnos</em>, e.g. [3-20], [89-89], [100-120].
* The ranges are stored in a sorted hashmap and the {@link Comparable#compareTo(Object)} method compares ranges
* against ranges, and ranges against seqnos. The latter helps to find a range given a seqno, e.g. seqno 105 will find
* range [100-120].<p/>
* Each range is implemented by {@link org.jgroups.util.SeqnoRange}, which has a bitset of all missing seqnos. When
* a seqno is received, that bit set is updated; the bit corresponding to the seqno is set to 1. A task linked to
* the range periodically retransmits missing messages.<p/>
* When all bits are 1 (= all messages have been received), the range is removed from the hashmap and the retransmission
* task is cancelled.
*
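* <p>A minimal usage sketch (the sender, command and scheduler objects are
* placeholders for whatever the enclosing protocol already provides):</p>
* <pre>
*   RangeBasedRetransmitter xmitter = new RangeBasedRetransmitter(sender, cmd, timer);
*   xmitter.add(100, 120);   // seqnos 100-120 are missing
*   xmitter.remove(105);     // seqno 105 has been received
* </pre>
*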
* @author Bela Ban
* @version $Id: RangeBasedRetransmitter.java,v 1.5 2009/11/30 11:43:15 belaban Exp $
*/
public class RangeBasedRetransmitter extends Retransmitter {
// todo: when JDK 6 is the baseline, convert the TreeMap to a TreeSet or ConcurrentSkipListSet and use ceiling()
/** Sorted hashmap storing the ranges */
private final Map<Seqno,Seqno> ranges=Collections.synchronizedSortedMap(new TreeMap<Seqno,Seqno>(new SeqnoComparator()));
/** Association between ranges and retransmission tasks */
private final Map<Seqno,Task> tasks=new ConcurrentHashMap<Seqno,Task>();
/**
* Create a new Retransmitter associated with the given sender address
* @param sender the address from which retransmissions are expected or to which retransmissions are sent
* @param cmd the retransmission callback reference
* @param sched retransmissions scheduler
*/
public RangeBasedRetransmitter(Address sender, RetransmitCommand cmd, TimeScheduler sched) {
super(sender, cmd, sched);
}
/**
* Add the given range [first_seqno, last_seqno] in the list of
* entries eligible for retransmission. If first_seqno > last_seqno,
* then the range [last_seqno, first_seqno] is added instead
*/
public void add(long first_seqno, long last_seqno) {
if(first_seqno > last_seqno) {
long tmp=first_seqno;
first_seqno=last_seqno;
last_seqno=tmp;
}
// create a single seqno if we have no range or else a SeqnoRange
Seqno range=first_seqno == last_seqno? new Seqno(first_seqno) : new SeqnoRange(first_seqno, last_seqno);
// each task needs its own retransmission interval, as they are stateful *and* mutable, so we *need* to copy !
RangeTask new_task=new RangeTask(range, RETRANSMIT_TIMEOUTS.copy(), cmd, sender);
Seqno old_range=ranges.put(range, range);
if(old_range != null)
log.error("new range " + range + " overlaps with old range " + old_range);
tasks.put(range, new_task);
new_task.doSchedule(); // Entry adds itself to the timer
if(log.isTraceEnabled())
log.trace("added range " + sender + " [" + range + "]");
}
/**
* Remove the given sequence number from the list of seqnos eligible
* for retransmission. If there are no more seqno intervals in the
* respective entry, cancel the entry from the retransmission
* scheduler and remove it from the pending entries
*/
public int remove(long seqno) {
int retval=0;
Seqno range=ranges.get(new Seqno(seqno, true));
if(range == null)
return 0;
range.set(seqno);
if(log.isTraceEnabled())
log.trace("removed " + sender + " #" + seqno + " from retransmitter");
// if the range has no missing messages, get the associated task and cancel it
if(range.getNumberOfMissingMessages() == 0) {
Task task=tasks.remove(range);
if(task != null) {
task.cancel();
retval=task.getNumRetransmits();
}
else
log.error("task for range " + range + " not found");
ranges.remove(range);
if(log.isTraceEnabled())
log.trace("all messages for " + sender + " [" + range + "] have been received; removing range");
}
return retval;
}
/**
* Reset the retransmitter: clear all msgs and cancel all the
* respective tasks
*/
public void reset() {
synchronized(ranges) {
for(Seqno range: ranges.keySet()) {
// get task associated with range and cancel it
Task task=tasks.get(range);
if(task != null) {
task.cancel();
tasks.remove(range);
}
}
ranges.clear();
}
for(Task task: tasks.values())
task.cancel();
tasks.clear();
}
public String toString() {
int missing_msgs=0;
synchronized(ranges) {
for(Seqno range: ranges.keySet()) {
missing_msgs+=range.getNumberOfMissingMessages();
}
}
StringBuilder sb=new StringBuilder();
sb.append(missing_msgs).append(" messages to retransmit");
if(missing_msgs < 50) {
Collection<Range> all_missing_msgs=new LinkedList<Range>();
for(Seqno range: ranges.keySet()) {
all_missing_msgs.addAll(range.getMessagesToRetransmit());
}
sb.append(": ").append(all_missing_msgs);
}
return sb.toString();
}
public int size() {
int retval=0;
synchronized(ranges) {
for(Seqno range: ranges.keySet()) {
retval+=range.getNumberOfMissingMessages();
}
}
return retval;
}
protected class RangeTask extends Task {
protected final Seqno range;
protected RangeTask(Seqno range, Interval intervals, RetransmitCommand cmd, Address msg_sender) {
super(intervals, cmd, msg_sender);
this.range=range;
}
public String toString() {
return range.toString();
}
protected void callRetransmissionCommand() {
Collection<Range> missing=range.getMessagesToRetransmit();
if(missing.isEmpty()) {
cancel();
}
else {
for(Range range: missing) {
command.retransmit(range.low, range.high, msg_sender);
}
}
}
}
}
|
package org.basex.gui.view.editor;
import static org.basex.core.Text.*;
import static org.basex.gui.GUIConstants.*;
import static org.basex.util.Token.*;
import java.awt.*;
import java.awt.event.*;
import java.io.*;
import java.util.*;
import java.util.regex.*;
import javax.swing.*;
import javax.swing.event.*;
import org.basex.core.*;
import org.basex.data.*;
import org.basex.gui.*;
import org.basex.gui.GUIConstants.Fill;
import org.basex.gui.GUIConstants.Msg;
import org.basex.gui.dialog.*;
import org.basex.gui.editor.Editor.Action;
import org.basex.gui.editor.*;
import org.basex.gui.layout.*;
import org.basex.gui.layout.BaseXFileChooser.Mode;
import org.basex.gui.layout.BaseXLayout.DropHandler;
import org.basex.gui.view.*;
import org.basex.io.*;
import org.basex.util.*;
import org.basex.util.list.*;
public final class EditorView extends View {
/** Number of files in the history. */
private static final int HISTORY = 18;
/** Number of files in the compact history. */
private static final int HISTCOMP = 7;
/** XQuery error pattern. */
private static final Pattern XQERROR = Pattern.compile(
"(.*?), ([0-9]+)/([0-9]+)" + COL);
/** XML error pattern. */
private static final Pattern XMLERROR = Pattern.compile(
LINE_X.replaceAll("%", "(.*?)") + COL + ".*");
/** Error information pattern. */
private static final Pattern ERRORINFO = Pattern.compile(
"^.*\r?\n\\[.*?\\] |" + LINE_X.replaceAll("%", ".*?") + COLS + "|\r?\n.*",
Pattern.DOTALL);
/** Error tooltip pattern. */
private static final Pattern ERRORTT = Pattern.compile(
"^.*\r?\n" + STOPPED_AT + "|\r?\n" + STACK_TRACE_C + ".*", Pattern.DOTALL);
/** Search bar. */
final SearchBar search;
/** History Button. */
final BaseXButton hist;
/** Execute Button. */
final BaseXButton stop;
/** Info label. */
final BaseXLabel info;
/** Position label. */
final BaseXLabel pos;
/** Query area. */
final BaseXTabs tabs;
/** Execute button. */
final BaseXButton go;
/** Thread counter. */
int threadID;
/** File in which the most recent error occurred. */
IO errFile;
/** Last error message. */
private String errMsg;
/** Most recent error position; used for clicking on error message. */
private int errPos;
/** Header string. */
private final BaseXLabel label;
/** Filter button. */
private final BaseXButton filter;
/**
* Default constructor.
* @param man view manager
*/
public EditorView(final ViewNotifier man) {
super(EDITORVIEW, man);
border(5).layout(new BorderLayout());
label = new BaseXLabel(EDITOR, true, false);
label.setForeground(GUIConstants.GRAY);
final BaseXButton openB = BaseXButton.command(GUICommands.C_EDITOPEN, gui);
final BaseXButton saveB = new BaseXButton(gui, "save", H_SAVE);
hist = new BaseXButton(gui, "hist", H_RECENTLY_OPEN);
final BaseXButton srch = new BaseXButton(gui, "search",
BaseXLayout.addShortcut(H_REPLACE, BaseXKeys.FIND.toString()));
stop = new BaseXButton(gui, "stop", H_STOP_PROCESS);
stop.addKeyListener(this);
stop.setEnabled(false);
go = new BaseXButton(gui, "go",
BaseXLayout.addShortcut(H_REPLACE, BaseXKeys.EXEC.toString()));
go.addKeyListener(this);
filter = BaseXButton.command(GUICommands.C_FILTER, gui);
filter.addKeyListener(this);
filter.setEnabled(false);
final BaseXBack buttons = new BaseXBack(Fill.NONE);
buttons.layout(new TableLayout(1, 8, 1, 0)).border(0, 0, 8, 0);
buttons.add(openB);
buttons.add(saveB);
buttons.add(hist);
buttons.add(srch);
buttons.add(Box.createHorizontalStrut(6));
buttons.add(stop);
buttons.add(go);
buttons.add(filter);
final BaseXBack b = new BaseXBack(Fill.NONE).layout(new BorderLayout());
b.add(buttons, BorderLayout.WEST);
b.add(label, BorderLayout.EAST);
add(b, BorderLayout.NORTH);
tabs = new BaseXTabs(gui);
tabs.setFocusable(Prop.MAC);
final SearchEditor se = new SearchEditor(gui, tabs, null).button(srch);
search = se.bar();
addCreateTab();
add(se, BorderLayout.CENTER);
// status and query pane
search.editor(addTab(), false);
info = new BaseXLabel().setText(OK, Msg.SUCCESS);
pos = new BaseXLabel(" ");
posCode.invokeLater();
final BaseXBack south = new BaseXBack(Fill.NONE).border(10, 0, 2, 0);
south.layout(new BorderLayout(4, 0));
south.add(info, BorderLayout.CENTER);
south.add(pos, BorderLayout.EAST);
add(south, BorderLayout.SOUTH);
refreshLayout();
// add listeners
saveB.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
final JPopupMenu pop = new JPopupMenu();
final StringBuilder mnem = new StringBuilder();
final JMenuItem sa = GUIMenu.newItem(GUICommands.C_EDITSAVE, gui, mnem);
final JMenuItem sas = GUIMenu.newItem(GUICommands.C_EDITSAVEAS, gui, mnem);
GUICommands.C_EDITSAVE.refresh(gui, sa);
GUICommands.C_EDITSAVEAS.refresh(gui, sas);
pop.add(sa);
pop.add(sas);
pop.show(saveB, 0, saveB.getHeight());
}
});
hist.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
final JPopupMenu pm = new JPopupMenu();
ActionListener al = new ActionListener() {
@Override
public void actionPerformed(final ActionEvent ac) {
// rewrite and open chosen file
final String s = ac.getActionCommand().replaceAll("(.*) \\[(.*)\\]", "$2/$1");
open(new IOFile(s), true);
}
};
// create popup menu of recently opened files
final StringList opened = new StringList();
for(final EditorArea ea : editors()) opened.add(ea.file.path());
final StringList files = new StringList(HISTORY);
final StringList all = new StringList(gui.gprop.strings(GUIProp.EDITOR));
final int fl = Math.min(all.size(), e == null ? HISTORY : HISTCOMP);
for(int f = 0; f < fl; f++) files.add(all.get(f));
Font f = null;
for(final String en : files.sort(Prop.CASE)) {
// disable opened files
final JMenuItem it = new JMenuItem(en.replaceAll("(.*)[/\\\\](.*)", "$2 [$1]"));
if(opened.contains(en)) {
if(f == null) f = it.getFont().deriveFont(Font.BOLD);
it.setFont(f);
}
pm.add(it).addActionListener(al);
}
al = new ActionListener() {
@Override
public void actionPerformed(final ActionEvent ac) {
hist.getActionListeners()[0].actionPerformed(null);
}
};
if(e != null && pm.getComponentCount() == HISTCOMP) {
pm.add(new JMenuItem("...")).addActionListener(al);
}
pm.show(hist, 0, hist.getHeight());
}
});
refreshHistory(null);
info.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(final MouseEvent e) {
jumpToError();
}
});
stop.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
stop.setEnabled(false);
go.setEnabled(false);
gui.stop();
}
});
go.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
getEditor().release(Action.EXECUTE);
}
});
tabs.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(final ChangeEvent e) {
final EditorArea ea = getEditor();
if(ea == null) return;
search.editor(ea, true);
gui.refreshControls();
posCode.invokeLater();
}
});
BaseXLayout.addDrop(this, new DropHandler() {
@Override
public void drop(final Object file) {
if(file instanceof File) open(new IOFile((File) file), true);
}
});
}
@Override
public void refreshInit() { }
@Override
public void refreshFocus() { }
@Override
public void refreshMark() {
final EditorArea edit = getEditor();
go.setEnabled(edit.script || !gui.gprop.is(GUIProp.EXECRT));
final Nodes mrk = gui.context.marked;
filter.setEnabled(!gui.gprop.is(GUIProp.FILTERRT) && mrk != null && mrk.size() != 0);
}
@Override
public void refreshContext(final boolean more, final boolean quick) { }
@Override
public void refreshLayout() {
label.border(-6, 0, 0, 2).setFont(GUIConstants.lfont);
for(final EditorArea edit : editors()) edit.setFont(GUIConstants.mfont);
search.refreshLayout();
final Font ef = GUIConstants.font.deriveFont(7f + (GUIConstants.fontSize >> 1));
info.setFont(ef);
pos.setFont(ef);
}
@Override
public void refreshUpdate() { }
@Override
public boolean visible() {
return gui.gprop.is(GUIProp.SHOWEDITOR);
}
@Override
public void visible(final boolean v) {
gui.gprop.set(GUIProp.SHOWEDITOR, v);
}
@Override
protected boolean db() {
return false;
}
/**
* Opens a new file.
*/
public void open() {
// open file chooser for editor files
final BaseXFileChooser fc = new BaseXFileChooser(OPEN,
gui.gprop.get(GUIProp.WORKPATH), gui);
fc.filter(XQUERY_FILES, IO.XQSUFFIXES);
fc.filter(BXS_FILES, IO.BXSSUFFIX);
fc.textFilters();
final IOFile[] files = fc.multi().selectAll(Mode.FOPEN);
for(final IOFile f : files) open(f, true);
}
/**
* Reverts the contents of the currently opened editor.
*/
public void reopen() {
getEditor().reopen(true);
}
/**
* Saves the contents of the currently opened editor.
* @return {@code false} if operation was canceled
*/
public boolean save() {
final EditorArea edit = getEditor();
return edit.opened() ? save(edit.file) : saveAs();
}
/**
* Saves the contents of the currently opened editor under a new name.
* @return {@code false} if operation was canceled
*/
public boolean saveAs() {
// open file chooser for saving the edited file
final EditorArea edit = getEditor();
final String path = edit.opened() ? edit.file.path() :
gui.gprop.get(GUIProp.WORKPATH);
final BaseXFileChooser fc = new BaseXFileChooser(SAVE_AS, path, gui);
fc.filter(XQUERY_FILES, IO.XQSUFFIXES);
fc.filter(BXS_FILES, IO.BXSSUFFIX);
fc.textFilters();
fc.suffix(IO.XQSUFFIX);
final IOFile file = fc.select(Mode.FSAVE);
return file != null && save(file);
}
/**
* Creates a new file.
*/
public void newFile() {
addTab();
refreshControls(true);
}
/**
* Opens the specified query file.
* @param file query file
* @param parse parse contents
* @return opened editor, or {@code null} if file could not be opened
*/
public EditorArea open(final IO file, final boolean parse) {
if(!visible()) GUICommands.C_SHOWEDITOR.execute(gui);
EditorArea edit = find(file, true);
if(edit != null) {
// display open file
tabs.setSelectedComponent(edit);
edit.reopen(true);
} else {
try {
final byte[] text = file.read();
// get current editor
edit = getEditor();
// create new tab if current text is stored on disk or has been modified
if(edit.opened() || edit.modified) edit = addTab();
edit.initText(text);
edit.file(file);
if(parse) edit.release(Action.PARSE);
} catch(final IOException ex) {
refreshHistory(null);
BaseXDialog.error(gui, FILE_NOT_OPENED);
return null;
}
}
return edit;
}
/**
* Refreshes the list of recent query files and updates the query path.
* @param file new file
*/
void refreshHistory(final IO file) {
final StringList paths = new StringList();
String path = null;
if(file != null) {
path = file.path();
gui.gprop.set(GUIProp.WORKPATH, file.dirPath());
paths.add(path);
tabs.setToolTipTextAt(tabs.getSelectedIndex(), path);
}
final String[] old = gui.gprop.strings(GUIProp.EDITOR);
for(int p = 0; paths.size() < HISTORY && p < old.length; p++) {
final IO fl = IO.get(old[p]);
if(fl.exists() && !fl.eq(file)) paths.add(fl.path());
}
// store sorted history
gui.gprop.set(GUIProp.EDITOR, paths.toArray());
hist.setEnabled(!paths.isEmpty());
}
/**
* Closes an editor.
* @param edit editor to be closed. {@code null} closes the currently
* opened editor.
* @return {@code true} if editor was closed
*/
public boolean close(final EditorArea edit) {
final EditorArea ea = edit != null ? edit : getEditor();
if(!confirm(ea)) return false;
tabs.remove(ea);
final int t = tabs.getTabCount();
final int i = tabs.getSelectedIndex();
if(t == 1) {
// reopen single tab
addTab();
} else if(i + 1 == t) {
// if necessary, activate last editor tab
tabs.setSelectedIndex(i - 1);
}
return true;
}
/**
* Jumps to a specific line.
*/
public void gotoLine() {
final EditorArea edit = getEditor();
final int ll = edit.last.length;
final int cr = edit.getCaret();
int l = 1;
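// count newlines up to the caret to determine the current line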
for(int e = 0; e < ll && e < cr; e += cl(edit.last, e)) {
if(edit.last[e] == '\n') ++l;
}
final DialogLine dl = new DialogLine(gui, l);
if(!dl.ok()) return;
final int el = dl.line();
int p = 0;
l = 1;
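// find the text offset of the first character of the requested line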
for(int e = 0; e < ll && l < el; e += cl(edit.last, e)) {
if(edit.last[e] != '\n') continue;
p = e + 1;
++l;
}
edit.setCaret(p);
posCode.invokeLater();
}
/**
* Starts a thread, which shows a waiting info after a short timeout.
*/
public void start() {
final int thread = threadID;
new Thread() {
@Override
public void run() {
Performance.sleep(200);
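// only show the waiting info if this query is still the active one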
if(thread == threadID) {
info.setText(PLEASE_WAIT_D, Msg.SUCCESS).setToolTipText(null);
stop.setEnabled(true);
}
}
}.start();
}
/**
* Evaluates the info message resulting from a parsed or executed query.
* @param msg info message
* @param ok {@code true} if evaluation was successful
* @param refresh refresh buttons
*/
public void info(final String msg, final boolean ok, final boolean refresh) {
// do not refresh view when query is running
if(!refresh && stop.isEnabled()) return;
++threadID;
errPos = -1;
errFile = null;
errMsg = null;
getEditor().resetError();
if(refresh) {
stop.setEnabled(false);
refreshMark();
}
if(ok) {
info.setCursor(GUIConstants.CURSORARROW);
info.setText(msg, Msg.SUCCESS).setToolTipText(null);
} else {
error(msg, false);
info.setCursor(GUIConstants.CURSORHAND);
info.setText(ERRORINFO.matcher(msg).replaceAll(""), Msg.ERROR);
final String tt = ERRORTT.matcher(msg).replaceAll("").
replace("<", "<").replace(">", ">").
replaceAll("\r?\n", "<br/>").replaceAll("(<br/>.*?)<br/>.*", "$1");
info.setToolTipText("<html>" + tt + "</html>");
}
}
/**
* Jumps to the current error.
*/
void jumpToError() {
if(errMsg != null) error(true);
}
/**
* Handles info messages resulting from a query execution.
* @param msg info message
* @param jump jump to error position
*/
public void error(final String msg, final boolean jump) {
errMsg = msg;
for(final String s : msg.split("\r?\n")) {
if(XQERROR.matcher(s).matches()) {
errMsg = s.replace(STOPPED_AT, "");
break;
}
}
error(jump);
}
/**
* Handles info messages resulting from a query execution.
* @param jump jump to error position
*/
private void error(final boolean jump) {
Matcher m = XQERROR.matcher(errMsg);
int el, ec = 2;
if(m.matches()) {
errFile = new IOFile(m.group(1));
el = Token.toInt(m.group(2));
ec = Token.toInt(m.group(3));
} else {
m = XMLERROR.matcher(errMsg);
if(!m.matches()) return;
el = Token.toInt(m.group(1));
errFile = getEditor().file;
}
EditorArea edit = find(errFile, false);
if(jump) {
if(edit == null) edit = open(errFile, false);
if(edit != null) tabs.setSelectedComponent(edit);
}
if(edit == null) return;
// find approximate error position
final int ll = edit.last.length;
int ep = ll;
for(int p = 0, l = 1, c = 1; p < ll; ++c, p += cl(edit.last, p)) {
if(l > el || l == el && c == ec) {
ep = p;
break;
}
if(edit.last[p] == '\n') {
++l;
c = 0;
}
}
if(ep < ll && Character.isLetterOrDigit(cp(edit.last, ep))) {
// step back to the first character of the token at the error position
while(ep > 0 && Character.isLetterOrDigit(cp(edit.last, ep - 1))) --ep;
}
edit.error(ep);
errPos = ep;
if(jump) {
edit.jumpError(errPos);
posCode.invokeLater();
}
}
/**
* Shows a quit dialog for all modified query files.
* @return {@code false} if confirmation was canceled
*/
public boolean confirm() {
for(final EditorArea edit : editors()) {
tabs.setSelectedComponent(edit);
if(!close(edit)) return false;
}
return true;
}
/**
* Checks if the current text can be saved or reverted.
* @return result of check
*/
public boolean modified() {
final EditorArea edit = getEditor();
return edit.modified || !edit.opened();
}
/**
* Returns the current editor.
* @return editor
*/
public EditorArea getEditor() {
final Component c = tabs.getSelectedComponent();
return c instanceof EditorArea ? (EditorArea) c : null;
}
/**
* Refreshes the query modification flag.
* @param force force the update even if the modification flag is unchanged
*/
void refreshControls(final boolean force) {
// update modification flag
final EditorArea edit = getEditor();
final boolean oe = edit.modified;
edit.modified = edit.hist != null && edit.hist.modified();
if(edit.modified == oe && !force) return;
// update tab title
String title = edit.file.name();
if(edit.modified) title += '*';
edit.label.setText(title);
// update components
gui.refreshControls();
posCode.invokeLater();
}
/** Code for setting cursor position. */
final GUICode posCode = new GUICode() {
@Override
public void eval(final Object arg) {
final int[] lc = getEditor().pos();
pos.setText(lc[0] + " : " + lc[1]);
}
};
/**
* Finds the editor that contains the specified file.
* @param file file to be found
* @param opened considers only opened files
* @return editor
*/
EditorArea find(final IO file, final boolean opened) {
for(final EditorArea edit : editors()) {
if(edit.file.eq(file) && (!opened || edit.opened())) return edit;
}
return null;
}
/**
* Saves the specified editor contents.
* @param file file to write
* @return {@code false} if confirmation was canceled
*/
private boolean save(final IO file) {
try {
final EditorArea edit = getEditor();
((IOFile) file).write(edit.getText());
edit.file(file);
return true;
} catch(final Exception ex) {
BaseXDialog.error(gui, FILE_NOT_SAVED);
return false;
}
}
/**
* Chooses a unique file reference for a new editor tab.
* @return io reference
*/
private IOFile newTabFile() {
// collect numbers of existing files
final BoolList bl = new BoolList();
for(final EditorArea edit : editors()) {
if(edit.opened()) continue;
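// mark the numeric suffix of each unsaved tab name as taken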
final String n = edit.file.name().substring(FILE.length());
bl.set(n.isEmpty() ? 1 : Integer.parseInt(n), true);
}
// find first free file number
int c = 0;
while(++c < bl.size() && bl.get(c));
// create io reference
return new IOFile(gui.gprop.get(GUIProp.WORKPATH), FILE + (c == 1 ? "" : c));
}
/**
* Adds a new editor tab.
* @return editor reference
*/
EditorArea addTab() {
final EditorArea edit = new EditorArea(this, newTabFile());
edit.setFont(GUIConstants.mfont);
final BaseXBack tab = new BaseXBack(new BorderLayout(10, 0)).mode(Fill.NONE);
tab.add(edit.label, BorderLayout.CENTER);
final BaseXButton close = tabButton("e_close");
close.setRolloverIcon(BaseXLayout.icon("e_close2"));
close.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
close(edit);
}
});
tab.add(close, BorderLayout.EAST);
tabs.add(edit, tab, tabs.getComponentCount() - 2);
return edit;
}
/**
* Adds a tab for creating new tabs.
*/
private void addCreateTab() {
final BaseXButton add = tabButton("e_new");
add.setRolloverIcon(BaseXLayout.icon("e_new2"));
add.addActionListener(new ActionListener() {
@Override
public void actionPerformed(final ActionEvent e) {
addTab();
refreshControls(true);
}
});
tabs.add(new BaseXBack(), add, 0);
tabs.setEnabledAt(0, false);
}
/**
* Adds a new tab button.
* @param icon button icon
* @return button
*/
private BaseXButton tabButton(final String icon) {
final BaseXButton b = new BaseXButton(gui, icon, null);
b.border(2, 2, 2, 2).setContentAreaFilled(false);
b.setFocusable(false);
return b;
}
/**
* Shows a quit dialog for the specified editor.
* @param edit editor to be saved
* @return {@code false} if confirmation was canceled
*/
private boolean confirm(final EditorArea edit) {
if(edit.modified && (edit.opened() || edit.getText().length != 0)) {
final Boolean ok = BaseXDialog.yesNoCancel(gui,
Util.info(CLOSE_FILE_X, edit.file.name()));
if(ok == null || ok && !save()) return false;
}
return true;
}
/**
* Returns all editors.
* @return editors
*/
EditorArea[] editors() {
final ArrayList<EditorArea> edits = new ArrayList<EditorArea>();
for(final Component c : tabs.getComponents()) {
if(c instanceof EditorArea) edits.add((EditorArea) c);
}
return edits.toArray(new EditorArea[edits.size()]);
}
}
|
package org.nees.buffalo.rdv.action;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import org.nees.buffalo.rdv.rbnb.LocalServer;
import org.nees.buffalo.rdv.rbnb.RBNBController;
import org.nees.buffalo.rdv.ui.MessagePopup;
/**
* Action to control offline mode.
*
* @author Jason P. Hanley
*/
public class OfflineAction extends DataViewerAction {
public OfflineAction() {
super(
"Work offline",
"View data locally",
KeyEvent.VK_W);
}
/**
* Respond to an event for this action. This will start or stop the local RBNB
* server.
*/
public void actionPerformed(ActionEvent ae) {
if (isSelected()) {
stopServer();
} else {
startServer();
}
}
/**
* Start the local server and connect to it.
*/
public void startServer() {
RBNBController rbnb = RBNBController.getInstance();
LocalServer server = LocalServer.getInstance();
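// disconnect from the current server before switching to the local one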
rbnb.disconnect();
try {
server.startServer();
} catch (Exception e) {
e.printStackTrace();
MessagePopup.getInstance().showError("Failed to start local data server for offline usage.");
return;
}
rbnb.setRBNBHostName("localhost");
rbnb.setRBNBPortNumber(3333);
rbnb.connect();
setSelected(true);
}
/**
* Disconnect from the local server and stop it.
*/
public void stopServer() {
RBNBController rbnb = RBNBController.getInstance();
LocalServer server = LocalServer.getInstance();
rbnb.disconnect();
try {
server.stopServer();
} catch (Exception e) {
e.printStackTrace();
}
setSelected(false);
}
}
|
package org.nees.buffalo.rdv.ui;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.border.EmptyBorder;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import org.nees.buffalo.rdv.DataViewer;
import org.nees.buffalo.rdv.rbnb.RBNBController;
import org.nees.rbnb.marker.EventMarker;
import com.jgoodies.uif_lite.panel.SimpleInternalFrame;
/**
* A panel that contains UI elements to collect the data needed to create an
* event marker.
*
* @author Lawrence J. Miller
* @author Jason P. Hanley
*/
public class MarkerSubmitPanel extends JPanel {
/**
* The rbnb controller to interface with the server
*/
private RBNBController rbnbController;
/**
* The text field to collect the content of the marker
*/
private JTextField markerContentField;
/**
* The button to submit the marker with
*/
private JButton markerSubmitButton;
/**
* The time at which the user started to describe the event
*/
double startTime;
/**
* Creates the marker submit panel with a content field and a submit button.
*
* @param rbnbController the rbnb controller to use for sending the marker
*/
public MarkerSubmitPanel(RBNBController rbnbController) {
super();
this.rbnbController = rbnbController;
initPanel();
startTime = -1;
}
/**
* Create the UI.
*/
private void initPanel() {
setBorder(null);
setLayout(new BorderLayout());
JPanel p = new JPanel();
p.setBorder(new EmptyBorder(5,5,5,5));
p.setLayout(new BorderLayout(5, 5));
markerContentField = new JTextField();
markerContentField.setToolTipText("Describe the event");
markerContentField.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent ae) {
submitMarker();
}
});
// See when text is first entered so we can get an accurate timestamp for
// marker submission.
markerContentField.getDocument().addDocumentListener(new DocumentListener() {
public void insertUpdate(DocumentEvent de) {
if (startTime == -1) {
startTime = rbnbController.getLocation();
}
}
public void removeUpdate(DocumentEvent de) {
// reset the start time if all text is removed
if (de.getDocument().getLength() == 0) {
startTime = -1;
}
}
public void changedUpdate(DocumentEvent de) {}
});
p.add(markerContentField, BorderLayout.CENTER);
markerSubmitButton = new JButton("Submit");
markerSubmitButton.setToolTipText("Mark this event");
markerSubmitButton.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent ae) {
submitMarker();
}
});
p.add(markerSubmitButton, BorderLayout.EAST);
SimpleInternalFrame sif = new SimpleInternalFrame(
DataViewer.getIcon("icons/info.gif"),
"Marker Panel",
null,
p);
add(sif, BorderLayout.CENTER);
}
/**
* Submit an event marker with the text in the text field as its content. If
* an error occurs, a dialog will be shown describing the error.
*/
private void submitMarker() {
String content = markerContentField.getText();
// only submit the marker if the content is not empty
if (content == null || content.length() == 0) {
JOptionPane.showMessageDialog(this,
"The event marker was not submitted since there was no content.\n" +
"Please describe the event using the text area in the marker panel.",
"Marker Not Submitted",
JOptionPane.WARNING_MESSAGE);
return;
}
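// build the marker with the description and the time the user began describing the event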
EventMarker marker = new EventMarker();
marker.setProperty ("type", "annotation");
marker.setProperty("content", content);
if (startTime == -1) {
startTime = rbnbController.getLocation();
}
marker.setProperty("timestamp", Double.toString(startTime));
try {
rbnbController.getMarkerManager().putMarker(marker);
} catch (Exception e) {
JOptionPane.showMessageDialog(this, "Failed to submit event marker.", "Marker Submission Error", JOptionPane.ERROR_MESSAGE);
e.printStackTrace();
}
markerContentField.setText(null);
}
/**
* Enable or disable the component. When disabled no user input can be made.
*
* @param enabled if true, enable the component, otherwise disable the
* component
*/
public void setEnabled(boolean enabled) {
super.setEnabled(enabled);
markerContentField.setEnabled(enabled);
markerSubmitButton.setEnabled(enabled);
}
}
|
package org.ohmage.domain.campaign;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SimpleTimeZone;
import java.util.TimeZone;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.ohmage.annotator.Annotator.ErrorCode;
import org.ohmage.domain.Location;
import org.ohmage.domain.campaign.Response.NoResponse;
import org.ohmage.domain.campaign.prompt.CustomChoicePrompt;
import org.ohmage.domain.campaign.prompt.MultiChoiceCustomPrompt;
import org.ohmage.domain.campaign.prompt.SingleChoiceCustomPrompt;
import org.ohmage.exception.ErrorCodeException;
import org.ohmage.util.StringUtils;
import org.ohmage.util.TimeUtils;
/**
* This class represents an uploaded or database-stored survey response,
* including the metadata about the response as well as the individual
* prompt responses.
*
* @author John Jenkins
* @author Joshua Selsky
*/
public class SurveyResponse {
private static final Logger LOGGER = Logger.getLogger(SurveyResponse.class);
private static final String JSON_KEY_USERNAME = "user";
private static final String JSON_KEY_CAMPAIGN_ID = "campaign_id";
private static final String JSON_KEY_CLIENT = "client";
private static final String JSON_KEY_DATE = "timestamp";
private static final String JSON_KEY_TIME = "time";
private static final String JSON_KEY_TIMEZONE = "timezone";
private static final String JSON_KEY_LOCATION_STATUS = "location_status";
private static final String JSON_KEY_LOCATION = "location";
private static final String JSON_KEY_SURVEY_ID = "survey_id";
private static final String JSON_KEY_SURVEY_NAME = "survey_title";
private static final String JSON_KEY_SURVEY_DESCRIPTION = "survey_description";
private static final String JSON_KEY_SURVEY_LAUNCH_CONTEXT = "survey_launch_context";
// TODO - I added the short and long keys because that's how the
// original spec worked and the Android app was breaking with only
// survey_launch_context. We can revisit for 2.9. -Josh
private static final String JSON_KEY_SURVEY_LAUNCH_CONTEXT_SHORT = "launch_context_short";
private static final String JSON_KEY_SURVEY_LAUNCH_CONTEXT_LONG = "launch_context_long";
private static final String JSON_KEY_RESPONSES = "responses";
private static final String JSON_KEY_PRIVACY_STATE = "privacy_state";
private static final String JSON_KEY_SURVEY_RESPONSE_ID = "survey_key";
private static final String JSON_KEY_PROMPT_ID = "prompt_id";
private static final String JSON_KEY_REPEATABLE_SET_ID = "repeatable_set_id";
private static final String JSON_KEY_PROMPT_VALUE = "value";
private final String username;
private final String campaignId;
private final String client;
private final Date date;
private final long time;
private final TimeZone timezone;
/**
* The possible status values of a location.
*
* @author John Jenkins
*/
public static enum LocationStatus {
VALID,
NETWORK,
INACCURATE,
STALE,
UNAVAILABLE;
/**
* Returns the location status value as an all-lower-case string.
*/
@Override
public String toString() {
return name().toLowerCase();
}
}
private final LocationStatus locationStatus;
private final Location location;
private final Survey survey;
private final long surveyResponseId;
private final Map<Integer, Response> responses;
/**
* Survey response privacy states.
*
* @author John Jenkins
*/
public static enum PrivacyState {
PRIVATE,
SHARED,
INVISIBLE;
public static PrivacyState getValue(final String privacyState) {
return valueOf(privacyState.toUpperCase());
}
/**
* Converts the privacy state to a nice, human-readable format.
*/
@Override
public String toString() {
return name().toLowerCase();
}
}
private final PrivacyState privacyState;
/**
* Context information gathered by the phone when this survey was launched.
*
* @author John Jenkins
*/
public static final class LaunchContext {
private static final String JSON_KEY_LAUNCH_TIME = "launch_time";
private static final String JSON_KEY_ACTIVE_TRIGGERS = "active_triggers";
private final Date launchTime;
// TODO: I was hoping to avoid keeping any JSON in the system and only
// using it as a serialization format. However, this is never
// referenced in the code and decoding the JSON only to recode it again
// introduces some unnecessary overhead, so for now it doesn't really
// matter.
private final JSONArray activeTriggers;
/**
* Creates a LaunchContext object from a JSONObject.
*
* @param launchContext A JSONObject that contains the launch context
* information.
*
* @throws ErrorCodeException Thrown if the launchContext is null or if
* the launchContext is missing any required
* keys (launch_time and active_triggers)
*/
private LaunchContext(final JSONObject launchContext) throws ErrorCodeException {
if(launchContext == null) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_LAUNCH_CONTEXT, "The launch context cannot be null.");
}
try {
launchTime = StringUtils.decodeDateTime(launchContext.getString(JSON_KEY_LAUNCH_TIME));
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_LAUNCH_CONTEXT, "launch_time is missing or incorrect in the survey_launch_context.");
}
try {
activeTriggers = launchContext.getJSONArray(JSON_KEY_ACTIVE_TRIGGERS);
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_LAUNCH_CONTEXT, "active_triggers array is missing from survey_launch_context.");
}
}
/**
* Creates a new LaunchContext.
*
* @param launchTime The time that the survey was launched.
*
* @param activeTriggers A possibly null list of trigger IDs that
* were active when the survey was launched.
*/
public LaunchContext(final Date launchTime, final JSONArray activeTriggers) {
if(launchTime == null) {
throw new IllegalArgumentException("The launch time cannot be null.");
}
if(activeTriggers == null) {
throw new IllegalArgumentException("The activeTriggers array cannot be null.");
}
this.launchTime = launchTime;
this.activeTriggers = activeTriggers;
}
/**
* Returns a new Date object that represents this launch time.
*
* @return A new Date object that represents this launch time.
*/
public final Date getLaunchTime() {
return launchTime;
}
/**
* Returns a new List object that contains all of the active triggers.
*
* @return A new List object that contains all of the active triggers.
*/
public final JSONArray getActiveTriggers() {
return activeTriggers;
}
/**
* Creates a JSONObject that represents this object.
*
* @param longVersion Whether to include the extra data or just the
* basics: "false" returns only the basics, "true"
* returns everything.
*
* @return A JSONObject that represents this object or null if there
* was an error.
*/
public final JSONObject toJson(final boolean longVersion) {
try {
JSONObject result = new JSONObject();
result.put(JSON_KEY_LAUNCH_TIME, TimeUtils.getIso8601DateTimeString(launchTime));
if(longVersion) {
result.put(JSON_KEY_ACTIVE_TRIGGERS, activeTriggers);
}
return result;
}
catch(JSONException e) {
LOGGER.warn("Could not create JSON from launch context", e);
return null;
}
}
/**
* Generates a hash code for this launch context.
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime
* result
+ ((activeTriggers == null) ? 0 : activeTriggers.hashCode());
result = prime * result
+ ((launchTime == null) ? 0 : launchTime.hashCode());
return result;
}
/**
* Compares this launch context to another object and returns true only
* if both objects are logically the same.
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
LaunchContext other = (LaunchContext) obj;
if (activeTriggers == null) {
if (other.activeTriggers != null)
return false;
} else if (!activeTriggers.equals(other.activeTriggers))
return false;
if (launchTime == null) {
if (other.launchTime != null)
return false;
} else if (!launchTime.equals(other.launchTime))
return false;
return true;
}
}
private final LaunchContext launchContext;
/**
* The possible column keys that can be requested for survey response read.
*
* @author John Jenkins
*/
public static enum ColumnKey {
/**
* The request-wide client key.
*/
CONTEXT_CLIENT ("urn:ohmage:context:client"),
/**
* The survey-wide timestamp key.
*
* @see ColumnKey#CONTEXT_TIMEZONE
*/
CONTEXT_TIMESTAMP ("urn:ohmage:context:timestamp"),
/**
* The survey's location's timestamp key where the time is adjusted to
* UTC.
*
* @see ColumnKey#CONTEXT_LOCATION_STATUS
*/
CONTEXT_LOCATION_UTC_TIMESTAMP ("urn:ohmage:context:utc_timestamp"),
/**
* The survey-wide timezone key.
*
* @see ColumnKey#CONTEXT_TIMESTAMP
*/
CONTEXT_TIMEZONE ("urn:ohmage:context:timezone"),
/**
* The key for the survey's entire launch context.
*
* @see ColumnKey#CONTEXT_LAUNCH_CONTEXT_SHORT
*/
CONTEXT_LAUNCH_CONTEXT_LONG ("urn:ohmage:context:launch_context_long"),
/**
* The key for only the survey's launch timestamp.
*
* @see ColumnKey#CONTEXT_LAUNCH_CONTEXT_SHORT
*/
CONTEXT_LAUNCH_CONTEXT_SHORT ("urn:ohmage:context:launch_context_short"),
/**
* The survey's location status key.
*
* @see ColumnKey#CONTEXT_LOCATION_ACCURACY
* @see ColumnKey#CONTEXT_LOCATION_LATITUDE
* @see ColumnKey#CONTEXT_LOCATION_LONGITUDE
* @see ColumnKey#CONTEXT_LOCATION_PROVIDER
* @see ColumnKey#CONTEXT_LOCATION_TIMESTAMP
*/
CONTEXT_LOCATION_STATUS ("urn:ohmage:context:location:status"),
/**
* The survey's location's latitude key.
*
* @see ColumnKey#CONTEXT_LOCATION_STATUS
*/
CONTEXT_LOCATION_LATITUDE ("urn:ohmage:context:location:latitude"),
/**
* The survey's location's longitude key.
*
* @see ColumnKey#CONTEXT_LOCATION_STATUS
*/
CONTEXT_LOCATION_LONGITUDE ("urn:ohmage:context:location:longitude"),
/**
* The survey's location's timestamp key.
*
* @see ColumnKey#CONTEXT_LOCATION_STATUS
*/
CONTEXT_LOCATION_TIMESTAMP ("urn:ohmage:context:location:timestamp"),
/**
* The survey's location's accuracy key.
*
* @see ColumnKey#CONTEXT_LOCATION_STATUS
*/
CONTEXT_LOCATION_ACCURACY ("urn:ohmage:context:location:accuracy"),
/**
* The survey's location's provider key.
*
* @see ColumnKey#CONTEXT_LOCATION_STATUS
*/
CONTEXT_LOCATION_PROVIDER ("urn:ohmage:context:location:provider"),
/**
* The survey's user ID key.
*/
USER_ID ("urn:ohmage:user:id"),
/**
* The survey's ID key.
*/
SURVEY_ID ("urn:ohmage:survey:id"),
/**
* The survey's title key.
*/
SURVEY_TITLE ("urn:ohmage:survey:title"),
/**
* The survey's description key.
*/
SURVEY_DESCRIPTION ("urn:ohmage:survey:description"),
/**
* The survey privacy state key.
*
* @see PrivacyState
*/
SURVEY_PRIVACY_STATE ("urn:ohmage:survey:privacy_state"),
/**
* The prompt's repeatable set ID if the prompt was part of a
* repeatable set.
*/
REPEATABLE_SET_ID ("urn:ohmage:repeatable_set:id"),
/**
* The prompt's repeatable set iteration if the prompt was part of a
* repeatable set.
*/
REPEATABLE_SET_ITERATION ("urn:ohmage:repeatable_set:iteration"),
/**
* The key used to indicate if responses are desired; however, the
* response from the server will include either only the prompts' ID
* in the case of {@link OutputFormat#JSON_ROWS} and
* {@link OutputFormat#CSV} or the prompts' ID prepended with
* {@link ColumnKey#URN_PROMPT_ID_PREFIX} in the case of
* {@link OutputFormat#JSON_COLUMNS}.
*/
PROMPT_RESPONSE ("urn:ohmage:prompt:response");
/**
* The prefix to prompt IDs in the {@link OutputFormat#JSON_COLUMNS}.
*
* @see OutputFormat#JSON_COLUMNS
*/
public static final String URN_PROMPT_ID_PREFIX = "urn:ohmage:prompt:id:";
private final String key;
/**
* Assigns the key to the enum constant.
*
* @param key The key.
*/
private ColumnKey(final String key) {
this.key = key;
}
public static ColumnKey getValue(final String key) {
ColumnKey[] values = ColumnKey.values();
for(int i = 0; i < values.length; i++) {
if(values[i].key.equals(key)) {
return values[i];
}
}
throw new IllegalArgumentException("Unknown key: " + key);
}
/**
* Returns the key value.
*
* @return The key value.
*/
@Override
public String toString() {
return key;
}
}
/**
* The known "survey response function" functions.
*
* @author John Jenkins
*/
public static enum Function {
COMPLETED_SURVEYS,
STATS;
public static Function getValue(final String key) {
return valueOf(key.toUpperCase());
}
/**
* Returns this Function as a human-readable value.
*
* @return This Function as a human-readable value.
*/
@Override
public String toString() {
return name().toLowerCase();
}
}
/**
* The possible output formats for reading survey responses.
*
* @author John Jenkins
*/
public static enum OutputFormat {
/**
* This will result in a JSONArray of JSONObjects where each JSONObject
* represents a single survey response.
*/
JSON_ROWS ("json-rows"),
/**
* This will result in a JSONObject where the key is one of the
* requested keys and the value is a JSONArray with at least the key
* "values" which is a JSONArray of the type-appropriate values. There
* may also be a key called "context" which further describes the
* prompt responses.
*
* @see org.ohmage.request.survey.SurveyResponseReadRequest#JSON_KEY_CONTEXT JSON_KEY_CONTEXT
* @see org.ohmage.request.survey.SurveyResponseReadRequest#JSON_KEY_VALUES JSON_KEY_VALUES
*/
JSON_COLUMNS ("json-columns"),
/**
* This will result in a file attachment which contains CSV-formatted
* results. The results will contain headers for the requested columns
* and one additional column for each of the requested prompts. If a
* row does not contain a response to the question, "null" will be
* output instead.
*/
CSV ("csv");
private final String key;
/**
* Assigns the key to the enum constant.
*
* @param key The key value to be associated with the enum.
*/
private OutputFormat(final String key) {
this.key = key;
}
public static OutputFormat getValue(final String key) {
OutputFormat[] values = OutputFormat.values();
for(int i = 0; i < values.length; i++) {
if(values[i].key.equals(key)) {
return values[i];
}
}
throw new IllegalArgumentException("Unknown key: " + key);
}
/**
* Returns the key value.
*
* @return The key value.
*/
@Override
public String toString() {
return key;
}
}
/**
* This represents the different sort parameters influencing how the final
* results are presented to the user.
*
* @author John Jenkins
*/
public static enum SortParameter {
SURVEY,
TIMESTAMP,
USER;
public static SortParameter getValue(final String value) {
return SortParameter.valueOf(value.toUpperCase());
}
/**
* Returns this sort parameter as a lower case version of its name.
*
* @return This sort parameter as a lower case version of its name.
*/
@Override
public String toString() {
return name().toLowerCase();
}
}
public SurveyResponse(final Survey survey, final long surveyResponseId,
final String username, final String campaignId, final String client,
final Date date, final long time, final TimeZone timezone,
final JSONObject launchContext,
final String locationStatus, final JSONObject location,
final PrivacyState privacyState)
throws ErrorCodeException {
if(survey == null) {
throw new IllegalArgumentException("The survey cannot be null.");
}
if(StringUtils.isEmptyOrWhitespaceOnly(username)) {
throw new IllegalArgumentException("The username cannot be null or whitespace only.");
}
else if(StringUtils.isEmptyOrWhitespaceOnly(campaignId)) {
throw new IllegalArgumentException("The campaign ID cannot be null or whitespace only.");
}
else if(StringUtils.isEmptyOrWhitespaceOnly(client)) {
throw new IllegalArgumentException("The client cannot be null or whitespace only.");
}
else if(date == null) {
throw new IllegalArgumentException("The date cannot be null.");
}
else if(timezone == null) {
throw new IllegalArgumentException("The timezone cannot be null.");
}
else if(launchContext == null) {
throw new IllegalArgumentException("The launch context cannot be null.");
}
else if(locationStatus == null) {
throw new IllegalArgumentException("The location status cannot be null.");
}
else if(privacyState == null) {
throw new IllegalArgumentException("The privacy state cannot be null.");
}
this.username = username;
this.campaignId = campaignId;
this.client = client;
this.date = date;
this.time = time;
this.timezone = timezone;
this.surveyResponseId = surveyResponseId;
this.survey = survey;
this.privacyState = privacyState;
this.launchContext = new LaunchContext(launchContext);
try {
this.locationStatus = LocationStatus.valueOf(locationStatus.toUpperCase());
}
catch(IllegalArgumentException e) {
throw new IllegalArgumentException("Unknown location status.", e);
}
if(location != null) {
this.location = new Location(location);
}
else {
this.location = null;
}
responses = new HashMap<Integer, Response>();
}
public SurveyResponse(final Survey survey, final long surveyResponseId,
final String username, final String campaignId, final String client,
final Date date, final long time, final TimeZone timezone,
final LaunchContext launchContext,
final LocationStatus locationStatus, final Location location,
final PrivacyState privacyState,
final Map<Integer, Response> responses)
throws ErrorCodeException {
if(survey == null) {
throw new IllegalArgumentException("The survey cannot be null.");
}
if(StringUtils.isEmptyOrWhitespaceOnly(username)) {
throw new IllegalArgumentException("The username cannot be null or whitespace only.");
}
else if(StringUtils.isEmptyOrWhitespaceOnly(campaignId)) {
throw new IllegalArgumentException("The campaign ID cannot be null or whitespace only.");
}
else if(StringUtils.isEmptyOrWhitespaceOnly(client)) {
throw new IllegalArgumentException("The client cannot be null or whitespace only.");
}
else if(date == null) {
throw new IllegalArgumentException("The date cannot be null.");
}
else if(timezone == null) {
throw new IllegalArgumentException("The timezone cannot be null.");
}
else if(launchContext == null) {
throw new IllegalArgumentException("The launch context cannot be null.");
}
else if(locationStatus == null) {
throw new IllegalArgumentException("The location status cannot be null.");
}
else if((! LocationStatus.UNAVAILABLE.equals(locationStatus)) &&
(location == null)) {
throw new IllegalArgumentException("THe location status is not unavailable, but the location status is null.");
}
else if(privacyState == null) {
throw new IllegalArgumentException("The privacy state cannot be null.");
}
this.username = username;
this.campaignId = campaignId;
this.client = client;
this.date = date;
this.time = time;
this.timezone = timezone;
this.surveyResponseId = surveyResponseId;
this.survey = survey;
this.privacyState = privacyState;
this.launchContext = launchContext;
this.locationStatus = locationStatus;
if((! LocationStatus.UNAVAILABLE.equals(locationStatus)) &&
(location == null)) {
throw new IllegalArgumentException("The location cannot be null unless the location status is unavailable.");
}
this.location = location;
this.responses = new HashMap<Integer, Response>(responses);
}
/**
* Creates a SurveyResponse object based on a JSONObject.
*
* @param username The username of the user that created this survey
* response.
*
* @param campaignId The campaign's unique identifier.
*
* @param client The client value.
*
* @param campaign The campaign.
*
* @param response The survey response as a JSONObject.
*
* @throws ErrorCodeException Thrown if the JSONObject could not be decoded
* as a survey response.
*/
public SurveyResponse(final long surveyResponseId,
final String username, final String campaignId,
final String client, final Campaign campaign,
final JSONObject response) throws ErrorCodeException {
if(StringUtils.isEmptyOrWhitespaceOnly(username)) {
throw new ErrorCodeException(ErrorCode.USER_INVALID_USERNAME, "The username is invalid.");
}
else if(StringUtils.isEmptyOrWhitespaceOnly(campaignId)) {
throw new ErrorCodeException(ErrorCode.CAMPAIGN_INVALID_ID, "The campaign ID is invalid.");
}
else if(StringUtils.isEmptyOrWhitespaceOnly(client)) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_CLIENT, "The client value is invalid.");
}
this.surveyResponseId = surveyResponseId;
this.username = username;
this.campaignId = campaignId;
this.client = client;
try {
date = StringUtils.decodeDateTime(response.getString(JSON_KEY_DATE));
if(date == null) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_DATE, "The date was not a valid date.");
}
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_DATE, "The date is missing.", e);
}
try {
time = response.getLong(JSON_KEY_TIME);
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_TIME, "The time is missing.", e);
}
try {
timezone = TimeZone.getTimeZone(response.getString(JSON_KEY_TIMEZONE));
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_TIMEZONE, "The timezone is missing.", e);
}
String surveyId;
try {
surveyId = response.getString(JSON_KEY_SURVEY_ID);
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_SURVEY_ID, "The survey ID is missing.", e);
}
survey = campaign.getSurveys().get(surveyId);
if(survey == null) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_SURVEY_ID, "The survey ID doesn't refer to any known surveys in the campaign.");
}
try {
launchContext = new LaunchContext(response.getJSONObject(JSON_KEY_SURVEY_LAUNCH_CONTEXT));
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_LAUNCH_CONTEXT, "The launch context is missing.", e);
}
try {
locationStatus = LocationStatus.valueOf(response.getString(JSON_KEY_LOCATION_STATUS).toUpperCase());
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_LOCATION_STATUS, "The location status is missing.", e);
}
catch(IllegalArgumentException e) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_LOCATION_STATUS, "The location status is unknown.", e);
}
Location tLocation = null;
try {
tLocation = new Location(response.getJSONObject(JSON_KEY_LOCATION));
}
catch(JSONException e) {
if(!LocationStatus.UNAVAILABLE.equals(locationStatus)) {
throw new ErrorCodeException(ErrorCode.SERVER_INVALID_LOCATION, "The location is missing.", e);
}
}
location = tLocation;
this.privacyState = PrivacyState.PRIVATE;
JSONArray responses;
try {
responses = response.getJSONArray(JSON_KEY_RESPONSES);
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "There weren't any responses for the survey.", e);
}
this.responses = processResponses(campaign.getSurveys().get(surveyId).getSurveyItems(), responses, null);
}
/**
* Returns the survey response's unique identifier.
*
* @return The survey response's unique identifier.
*/
public final long getSurveyResponseId() {
return surveyResponseId;
}
/**
* Returns the username.
*
* @return The username.
*/
public final String getUsername() {
return username;
}
/**
* Returns the campaign ID for the campaign to which this survey belongs.
*
* @return The campaign ID.
*/
public final String getCampaignId() {
return campaignId;
}
/**
* Returns the client value that was given when this survey response was
* uploaded.
*
* @return The client value.
*/
public final String getClient() {
return client;
}
/**
* Returns a copy of the date object representing when this survey response
* was generated. This should be correlated with {@link #getTimezone()}.
*
* @return A copy of the date object.
*
* @see #getTimezone()
*/
public final Date getDate() {
return new Date(date.getTime());
}
/**
* Returns the time this survey response was generated as a long value from
* the epoch.
*
* @return The time in milliseconds since the epoch.
*/
public final long getTime() {
return time;
}
/**
* Returns a copy of the timezone object representing the timezone of the
* device that generated this survey response at the time it was generated.
*
* @return The phone's timezone.
*/
public final TimeZone getTimezone() {
return new SimpleTimeZone(timezone.getRawOffset(), timezone.getID());
}
/**
* Returns the campaign-wide unique identifier for this survey response.
*
* @return The unique identifier for the survey for which this is a
* response.
*/
public final Survey getSurvey() {
return survey;
}
/**
* Returns context information from the phone when this survey was
* launched.
*
* @return Context information from the phone.
*/
public final LaunchContext getLaunchContext() {
return launchContext;
}
/**
* Returns the status of the location information collected when this
* survey response was generated.
*
* @return The location's status.
*/
public final LocationStatus getLocationStatus() {
return locationStatus;
}
/**
* Returns the location information about the phone when the survey
* response was generated if applicable.
*
* @return The location information if available or null if not.
*/
public final Location getLocation() {
return location;
}
/**
* Returns the privacy state of this survey response.
*
* @return The privacy state.
*/
public final PrivacyState getPrivacyState() {
return privacyState;
}
/**
* Returns an unmodifiable list of the responses.
*
* @return An unmodifiable list of the responses.
*/
public final Map<Integer, Response> getResponses() {
return Collections.unmodifiableMap(responses);
}
public final void addPromptResponse(final PromptResponse promptResponse) {
if(promptResponse == null) {
throw new IllegalArgumentException("The prompt response is null.");
}
RepeatableSet parent = promptResponse.getPrompt().getParent();
if(parent == null) {
responses.put(promptResponse.getPrompt().getIndex(), promptResponse);
}
else {
// FIXME: This assumes repeatable sets cannot contain repeatable
// sets. This needs to be fixed if we ever allow it.
int index = parent.getIndex();
RepeatableSetResponse rsResponse = (RepeatableSetResponse) responses.get(index);
if(rsResponse == null) {
rsResponse = new RepeatableSetResponse(parent, null);
responses.put(index, rsResponse);
}
rsResponse.addResponse(promptResponse.getRepeatableSetIteration(), index, promptResponse);
}
}
/**
* Returns a set of all of the prompt IDs for all of the responses in this
* survey response.
*
* @return A set of the prompt IDs.
*/
public Set<String> getPromptIds() {
return getPromptIds(responses.values());
}
/**
* Creates a JSONObject that represents this survey response object based
* on the given flags.
*
* @param withUsername Whether or not to include the username.
*
* @param withCampaignId Whether or not to include the campaign's ID.
*
* @param withClient Whether or not to include the client value.
*
* @param withPrivacyState Whether or not to include the privacy state.
*
* @param withDate Whether or not to include the date.
*
* @param withTime Whether or not to include the time.
*
* @param withTimezone Whether or not to include the timezone.
*
* @param withLocationStatus Whether or not to include the location status.
*
* @param withLocation Whether or not to include the location information.
* This may not be included anyway if there was no
* location information for this survey response.
*
* @param withSurveyId Whether or not to include the survey's ID.
*
* @param withSurveyTitle Whether or not to include the survey's title.
*
* @param withSurveyDescription Whether or not to include the survey's
* description.
*
* @param withSurveyLaunchContext Whether or not to include the survey's
* launch context.
*
* @param surveyLaunchContextLong If we are including the launch context,
* this indicates whether we are adding
* everything in the launch context or only
* the launch date and time.
*
* @param withResponses Whether or not to include the prompt responses.
*
* @param arrayInsteadOfObject Valid only if 'withResponses' is true, this
* will determine how the responses are output,
* either as a JSONObject where the keys are
* the prompt IDs and their value is another
* JSONObject that describes the prompt and the
* user's response or as a JSONArray of
* JSONObjects that describe the prompt and the
* user's response. If false, the former will
* happen; if true, the latter will happen.
*
* @return A JSONObject that represents this object or null if there was an
* error.
*/
public final JSONObject toJson(final boolean withUsername,
final boolean withCampaignId, final boolean withClient,
final boolean withPrivacyState,
final boolean withDate, final boolean withTime,
final boolean withTimezone,
final boolean withLocationStatus, final boolean withLocation,
final boolean withSurveyId,
final boolean withSurveyTitle, final boolean withSurveyDescription,
// TODO: This could lead to unpredictable output if a user put true
// for both of these. Can we switch it back to whether or not the
// launch context should be output as one parameter and if the
// output should be short instead of long (or vice versa) for the
// other parameter?
final boolean withLaunchContextShort,
final boolean withLaunchContextLong,
final boolean withResponses, final boolean arrayInsteadOfObject,
final boolean withId) {
try {
JSONObject result = new JSONObject();
if(withUsername) {
result.put(JSON_KEY_USERNAME, username);
}
if(withCampaignId) {
result.put(JSON_KEY_CAMPAIGN_ID, campaignId);
}
if(withClient) {
result.put(JSON_KEY_CLIENT, client);
}
if(withPrivacyState) {
result.put(JSON_KEY_PRIVACY_STATE, privacyState.toString());
}
if(withDate) {
result.put(JSON_KEY_DATE, TimeUtils.getIso8601DateTimeString(date));
}
if(withTime) {
result.put(JSON_KEY_TIME, time);
}
if(withTimezone) {
result.put(JSON_KEY_TIMEZONE, timezone.getID());
}
if(withLocationStatus) {
result.put(JSON_KEY_LOCATION_STATUS, locationStatus.toString());
}
if(withLocation && (location != null)) {
result.put(JSON_KEY_LOCATION, location.toJson(false));
}
if(withSurveyId && (survey != null)) {
result.put(JSON_KEY_SURVEY_ID, survey.getId());
}
if(withSurveyTitle && (survey != null)) {
result.put(JSON_KEY_SURVEY_NAME, survey.getTitle());
}
if(withSurveyDescription && (survey != null)) {
result.put(JSON_KEY_SURVEY_DESCRIPTION, survey.getDescription());
}
if(withLaunchContextShort) {
result.put(JSON_KEY_SURVEY_LAUNCH_CONTEXT_SHORT, launchContext.toJson(false));
}
if(withLaunchContextLong) {
result.put(JSON_KEY_SURVEY_LAUNCH_CONTEXT_LONG, launchContext.toJson(true));
}
if(withResponses) {
List<Integer> indices = new ArrayList<Integer>(responses.keySet());
Collections.sort(indices);
if(arrayInsteadOfObject) {
JSONArray responses = new JSONArray();
for(Integer index : indices) {
responses.put(this.responses.get(index).toJson(true));
}
result.put(JSON_KEY_RESPONSES, responses);
}
else {
JSONObject responses = new JSONObject();
for(Integer index : indices) {
Response response = this.responses.get(index);
responses.put(response.getId(), response.toJson(false));
}
result.put(JSON_KEY_RESPONSES, responses);
}
}
if(withId) {
result.put(JSON_KEY_SURVEY_RESPONSE_ID, surveyResponseId);
}
return result;
}
catch(JSONException e) {
LOGGER.warn("Could not generate JSON from a survey response", e);
return null;
}
}
/**
* Generates a hash code for this survey response.
*
* @return A hash code for this survey response.
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((campaignId == null) ? 0 : campaignId.hashCode());
result = prime * result + ((client == null) ? 0 : client.hashCode());
result = prime * result + ((date == null) ? 0 : date.hashCode());
result = prime * result
+ ((launchContext == null) ? 0 : launchContext.hashCode());
result = prime * result
+ ((location == null) ? 0 : location.hashCode());
result = prime * result
+ ((locationStatus == null) ? 0 : locationStatus.hashCode());
result = prime * result
+ ((privacyState == null) ? 0 : privacyState.hashCode());
result = prime * result
+ ((responses == null) ? 0 : responses.hashCode());
result = prime * result + ((survey == null) ? 0 : survey.hashCode());
result = prime * result
+ (int) (surveyResponseId ^ (surveyResponseId >>> 32));
result = prime * result + (int) (time ^ (time >>> 32));
result = prime * result
+ ((username == null) ? 0 : username.hashCode());
return result;
}
/**
* Determines if this survey response is equivalent to another object.
*
* @param obj The other object.
*
* @return True if the other object is logically equivalent to this survey
* response; false, otherwise.
*/
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
SurveyResponse other = (SurveyResponse) obj;
if (campaignId == null) {
if (other.campaignId != null)
return false;
} else if (!campaignId.equals(other.campaignId))
return false;
if (client == null) {
if (other.client != null)
return false;
} else if (!client.equals(other.client))
return false;
if (date == null) {
if (other.date != null)
return false;
} else if (!date.equals(other.date))
return false;
if (launchContext == null) {
if (other.launchContext != null)
return false;
} else if (!launchContext.equals(other.launchContext))
return false;
if (location == null) {
if (other.location != null)
return false;
} else if (!location.equals(other.location))
return false;
if (locationStatus != other.locationStatus)
return false;
if (privacyState != other.privacyState)
return false;
if (responses == null) {
if (other.responses != null)
return false;
} else if (!responses.equals(other.responses))
return false;
if (survey == null) {
if (other.survey != null)
return false;
} else if (!survey.equals(other.survey))
return false;
if (surveyResponseId != other.surveyResponseId)
return false;
if (time != other.time)
return false;
if (username == null) {
if (other.username != null)
return false;
} else if (!username.equals(other.username))
return false;
return true;
}
/**
* Processes a JSONArray of survey responses based on their survey item
* counterparts.
*
* @param surveyItems The survey items to which the responses should
* pertain.
*
* @param currArray A JSONArray of JSONObjects where each object is a
* response.
*
* @param repeatableSetIteration If this is processing the responses of a
* repeatable set, then this is the iteration
* of that repeatable set. Otherwise, it may
* be null.
*
* @return A map of a response's index to its Response object.
*
* @throws ErrorCodeException Thrown if any of the responses are invalid
* either syntactically or as compared to the
* survey objects.
*/
private Map<Integer, Response> processResponses(
final Map<Integer, SurveyItem> surveyItems,
final JSONArray currArray, final Integer repeatableSetIteration)
throws ErrorCodeException {
int numResponses = currArray.length();
Map<Integer, Response> results = new HashMap<Integer, Response>(numResponses);
for(int i = 0; i < numResponses; i++) {
try {
JSONObject currResponse = currArray.getJSONObject(i);
try {
String promptId = currResponse.getString(JSON_KEY_PROMPT_ID);
Prompt prompt = null;
for(SurveyItem surveyItem : surveyItems.values()) {
if(surveyItem.getId().equals(promptId)) {
prompt = (Prompt) surveyItem;
break;
}
}
if(prompt == null) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The prompt ID is unknown: " + promptId);
}
results.put(
prompt.getIndex(),
processPromptResponse(
prompt,
currResponse,
repeatableSetIteration));
}
catch(JSONException notPrompt) {
try {
String repeatableSetId = currResponse.getString(JSON_KEY_REPEATABLE_SET_ID);
RepeatableSet repeatableSet = null;
for(SurveyItem surveyItem : surveyItems.values()) {
if(surveyItem.getId().equals(repeatableSetId)) {
repeatableSet = (RepeatableSet) surveyItem;
}
}
if(repeatableSet == null) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The repeatable set ID is unknown: " + repeatableSetId);
}
results.put(
repeatableSet.getIndex(),
processRepeatableSet(
repeatableSet,
currResponse));
}
catch(JSONException notRepeatableSet) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The response wasn't a prompt response or repeatable set.");
}
}
catch(ClassCastException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The response and XML disagree on the type of a survey item.", e);
}
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "A response was not valid JSON.");
}
}
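// verify that every non-message survey item has a corresponding response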
for(SurveyItem surveyItem : surveyItems.values()) {
if(! (surveyItem instanceof Message)) {
String surveyItemId = surveyItem.getId();
boolean found = false;
for(Response response : results.values()) {
if(response.getId().equals(surveyItemId)) {
found = true;
break;
}
}
if(! found) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The response is missing a response for the prompt: " + surveyItemId);
}
}
}
return results;
}
/**
* Creates a PromptResponse object from a Prompt and the JSONObject that
* represents the response to the prompt.
*
* @param prompt The Prompt from which the response was generated.
*
* @param response The response from the user as a JSONObject.
*
* @param repeatableSetIteration If the prompt was part of a repeatable
* set, this is the iteration of that
* repeatable set.
*
* @return A PromptResponse generated by the 'prompt' based on the value in
* the 'response'.
*
* @throws ErrorCodeException Thrown if the JSONObject is invalid for
* getting a response value.
*/
private PromptResponse processPromptResponse(final Prompt prompt,
final JSONObject response, final Integer repeatableSetIteration)
throws ErrorCodeException {
Object responseObject;
try {
responseObject = response.get(JSON_KEY_PROMPT_VALUE);
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The response value was missing.", e);
}
// FIXME:
// This is the shim layer that allows custom choice prompts to still
// upload a key and lookup table instead of just the raw values.
if(prompt instanceof CustomChoicePrompt) {
try {
Map<Integer, String> choicesMap = new HashMap<Integer, String>();
JSONArray choices = response.getJSONArray("custom_choices");
int numChoices = choices.length();
for(int i = 0; i < numChoices; i++) {
JSONObject currChoice = choices.getJSONObject(i);
choicesMap.put(currChoice.getInt("choice_id"), currChoice.getString("choice_value"));
}
if(prompt instanceof MultiChoiceCustomPrompt) {
JSONArray responsesJson = (JSONArray) responseObject;
int numResponses = responsesJson.length();
Collection<String> responses = new ArrayList<String>(numResponses);
for(int i = 0; i < numResponses; i++) {
LOGGER.info(choicesMap.get(responsesJson.get(i)));
responses.add(choicesMap.get(responsesJson.get(i)));
}
responseObject = responses;
}
else if(prompt instanceof SingleChoiceCustomPrompt) {
Integer singleChoiceResponse = (Integer) responseObject;
responseObject = choicesMap.get(singleChoiceResponse);
}
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The dictionary for the custom choice prompt was missing or malformed: " + prompt.getId());
}
}
try {
return prompt.createResponse(responseObject, repeatableSetIteration);
}
catch(IllegalArgumentException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The response value was invalid.", e);
}
}
/**
* Creates a RepeatableSetResponse object from a RepeatableSet and the
* responses in a JSONObject.
*
* @param repeatableSet The RepeatableSet that generated the responses.
*
* @param response The responses as a JSONObject.
*
* @return A RepeatableSetResponse object that represents the responses
* to this repeatable set gathered from the JSONObject.
*
* @throws ErrorCodeException Thrown if the repeatable set JSONObject is
* malformed.
*/
private RepeatableSetResponse processRepeatableSet(
final RepeatableSet repeatableSet, final JSONObject response)
throws ErrorCodeException {
try {
if(response.getBoolean(RepeatableSetResponse.NOT_DISPLAYED)) {
return new RepeatableSetResponse(repeatableSet, NoResponse.NOT_DISPLAYED);
}
}
catch(JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The not displayed value is missing.", e);
}
RepeatableSetResponse result = new RepeatableSetResponse(repeatableSet, null);
JSONArray responses;
try {
responses = response.getJSONArray(JSON_KEY_RESPONSES);
} catch (JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "The responses array for the repeatable set is missing.", e);
}
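// each entry in the responses array holds the responses for one iteration of the repeatable set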
int numIterations = responses.length();
for(int i = 0; i < numIterations; i++) {
try {
result.addResponseGroup(
i + 1,
processResponses(
repeatableSet.getSurveyItems(),
responses.getJSONArray(i),
i
)
);
} catch (JSONException e) {
throw new ErrorCodeException(ErrorCode.SURVEY_INVALID_RESPONSES, "One of the response array objects for a repeatable set is not a JSONArray.", e);
}
}
return result;
}
/**
* Returns a set of all of the prompt IDs for all of the responses in this
* survey response.
*
* @param responses A collection of responses.
*
* @return A set of the prompt IDs.
*/
private Set<String> getPromptIds(final Collection<Response> responses) {
Set<String> result = new HashSet<String>();
for(Response response : responses) {
if(response instanceof PromptResponse) {
result.add(response.getId());
}
else if(response instanceof RepeatableSetResponse) {
for(Map<Integer, Response> responseGroup :
((RepeatableSetResponse) response).getResponseGroups().values()) {
result.addAll(getPromptIds(responseGroup.values()));
}
}
}
return result;
}
}
|
package org.embulk.filter;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import org.embulk.config.Config;
import org.embulk.config.ConfigDefault;
import org.embulk.config.ConfigException;
import org.embulk.config.ConfigSource;
import org.embulk.config.Task;
import org.embulk.config.TaskSource;
import org.embulk.spi.Column;
import org.embulk.spi.ColumnVisitor;
import org.embulk.spi.Exec;
import org.embulk.spi.FilterPlugin;
import org.embulk.spi.Page;
import org.embulk.spi.PageBuilder;
import org.embulk.spi.PageOutput;
import org.embulk.spi.PageReader;
import org.embulk.spi.Schema;
import org.embulk.spi.SchemaConfigException;
import org.embulk.spi.json.JsonParser;
import org.embulk.spi.time.Timestamp;
import org.embulk.spi.time.TimestampParseException;
import org.embulk.spi.time.TimestampParser;
import org.embulk.spi.type.*;
import org.joda.time.DateTimeZone;
import org.msgpack.value.Value;
import org.slf4j.Logger;
import java.util.HashMap;
import java.util.List;
public class ColumnFilterPlugin implements FilterPlugin
{
private static final Logger logger = Exec.getLogger(ColumnFilterPlugin.class);
public ColumnFilterPlugin()
{
}
// NOTE: This is not spi.ColumnConfig
private interface ColumnConfig extends Task
{
@Config("name")
public String getName();
@Config("type")
@ConfigDefault("null")
public Optional<Type> getType(); // required only for addColumns
@Config("default")
@ConfigDefault("null")
public Optional<Object> getDefault();
@Config("format")
@ConfigDefault("null")
public Optional<String> getFormat();
@Config("timezone")
@ConfigDefault("null")
public Optional<DateTimeZone> getTimeZone();
@Config("src")
@ConfigDefault("null")
public Optional<String> getSrc();
}
public interface PluginTask extends Task, TimestampParser.Task
{
@Config("columns")
@ConfigDefault("[]")
public List<ColumnConfig> getColumns();
@Config("add_columns")
@ConfigDefault("[]")
public List<ColumnConfig> getAddColumns();
@Config("drop_columns")
@ConfigDefault("[]")
public List<ColumnConfig> getDropColumns();
// See TimestampParser for default_timestamp_format, and default_timezone
}
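// Illustrative example configuration (a sketch; the registered filter type name
// "column" and all values are assumptions, but the keys follow the @Config
// annotations above):
//
//   filters:
//     - type: column
//       columns:
//         - {name: id}
//         - {name: id_copy, src: id}
//       add_columns:
//         - {name: created_at, type: timestamp, default: "2015-07-13", format: "%Y-%m-%d", timezone: "UTC"}
//
// Note: "columns" and "drop_columns" are mutually exclusive (enforced in transaction()).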
@Override
public void transaction(final ConfigSource config, final Schema inputSchema,
final FilterPlugin.Control control)
{
PluginTask task = config.loadConfig(PluginTask.class);
List<ColumnConfig> columns = task.getColumns();
List<ColumnConfig> addColumns = task.getAddColumns();
List<ColumnConfig> dropColumns = task.getDropColumns();
if (columns.size() == 0 && addColumns.size() == 0 && dropColumns.size() == 0) {
throw new ConfigException("One of \"columns\", \"add_columns\", \"drop_columns\" must be specified.");
}
if (columns.size() > 0 && dropColumns.size() > 0) {
throw new ConfigException("Either of \"columns\", \"drop_columns\" can be specified.");
}
// Automatically get column type from inputSchema for columns and dropColumns
ImmutableList.Builder<Column> builder = ImmutableList.builder();
int i = 0;
if (dropColumns.size() > 0) {
for (Column inputColumn : inputSchema.getColumns()) {
String name = inputColumn.getName();
boolean matched = false;
for (ColumnConfig dropColumn : dropColumns) {
if (dropColumn.getName().equals(name)) {
matched = true;
break;
}
}
if (! matched) {
Column outputColumn = new Column(i++, name, inputColumn.getType());
builder.add(outputColumn);
}
}
}
else if (columns.size() > 0) {
for (ColumnConfig column : columns) {
String name = column.getName();
Optional<Type> type = column.getType();
Optional<Object> defaultValue = column.getDefault();
Optional<String> src = column.getSrc();
String srcName = src.isPresent() ? src.get() : name;
Column inputColumn = getColumn(srcName, inputSchema);
if (inputColumn != null) { // filter or copy column
Column outputColumn = new Column(i++, name, inputColumn.getType());
builder.add(outputColumn);
}
else if (type.isPresent() && defaultValue.isPresent()) { // add column
Column outputColumn = new Column(i++, name, type.get());
builder.add(outputColumn);
}
else {
throw new SchemaConfigException(String.format("columns: Column src '%s' is not found in inputSchema. Column '%s' does not have \"type\" and \"default\"", srcName, name));
}
}
}
else {
for (Column inputColumn : inputSchema.getColumns()) {
Column outputColumn = new Column(i++, inputColumn.getName(), inputColumn.getType());
builder.add(outputColumn);
}
}
// Added columns are appended at the end. To place a column at the head or in the middle, use the `columns` option instead.
if (addColumns.size() > 0) {
for (ColumnConfig column : addColumns) {
String name = column.getName();
Optional<Type> type = column.getType();
Optional<Object> defaultValue = column.getDefault();
Optional<String> src = column.getSrc();
String srcName = null;
Column inputColumn = null;
if (src.isPresent()) {
srcName = src.get();
inputColumn = getColumn(srcName, inputSchema);
}
if (inputColumn != null) { // copy column
Column outputColumn = new Column(i++, name, inputColumn.getType());
builder.add(outputColumn);
}
else if (type.isPresent() && defaultValue.isPresent()) { // add column
Column outputColumn = new Column(i++, name, type.get());
builder.add(outputColumn);
}
else {
throw new SchemaConfigException(String.format("add_columns: Column src '%s' is not found in inputSchema, Column '%s' does not have \"type\" and \"default\"", srcName, name));
}
}
}
Schema outputSchema = new Schema(builder.build());
control.run(task.dump(), outputSchema);
}
private Column getColumn(String name, Schema schema)
{
// NOTE: a hash-based lookup would be faster; a linear scan is kept for simplicity
for (Column column : schema.getColumns()) {
if (column.getName().equals(name)) {
return column;
}
}
return null;
}
private String getSrc(String name, List<ColumnConfig> columnConfigs)
{
for (ColumnConfig columnConfig : columnConfigs) {
if (columnConfig.getName().equals(name) &&
columnConfig.getSrc().isPresent()) {
return (String) columnConfig.getSrc().get();
}
}
return null;
}
private Object getDefault(String name, Type type, List<ColumnConfig> columnConfigs, PluginTask task)
{
for (ColumnConfig columnConfig : columnConfigs) {
if (columnConfig.getName().equals(name)) {
if (type instanceof BooleanType) {
if (columnConfig.getDefault().isPresent()) {
return (Boolean) columnConfig.getDefault().get();
}
}
else if (type instanceof LongType) {
if (columnConfig.getDefault().isPresent()) {
return Long.valueOf(columnConfig.getDefault().get().toString());
}
}
else if (type instanceof DoubleType) {
if (columnConfig.getDefault().isPresent()) {
return Double.valueOf(columnConfig.getDefault().get().toString());
}
}
else if (type instanceof StringType) {
if (columnConfig.getDefault().isPresent()) {
return (String) columnConfig.getDefault().get();
}
}
else if (type instanceof JsonType) {
if (columnConfig.getDefault().isPresent()) {
JsonParser parser = new JsonParser();
return parser.parse((String) columnConfig.getDefault().get());
}
}
else if (type instanceof TimestampType) {
if (columnConfig.getDefault().isPresent()) {
String time = (String) columnConfig.getDefault().get();
String format = null;
if (columnConfig.getFormat().isPresent()) {
format = columnConfig.getFormat().get();
}
else {
format = task.getDefaultTimestampFormat();
}
DateTimeZone timezone = null;
if (columnConfig.getTimeZone().isPresent()) {
timezone = columnConfig.getTimeZone().get();
}
else {
timezone = task.getDefaultTimeZone();
}
TimestampParser parser = new TimestampParser(task.getJRuby(), format, timezone);
try {
Timestamp defaultValue = parser.parse(time);
return defaultValue;
}
catch (TimestampParseException ex) {
throw Throwables.propagate(ex);
}
}
}
return null;
}
}
return null;
}
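// Summary of how getDefault() above interprets the "default" config value per
// output type (example values are hypothetical):
//   boolean   -> used as-is, e.g. default: true
//   long      -> parsed from toString(), e.g. default: 0
//   double    -> parsed from toString(), e.g. default: 0.0
//   string    -> used as-is, e.g. default: ""
//   json      -> parsed with JsonParser, e.g. default: '{"k": "v"}'
//   timestamp -> parsed with TimestampParser using "format"/"timezone",
//                falling back to the task-level defaults, e.g. default: "2015-07-13"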
@Override
public PageOutput open(final TaskSource taskSource, final Schema inputSchema,
final Schema outputSchema, final PageOutput output)
{
PluginTask task = taskSource.loadTask(PluginTask.class);
// Map outputColumn => inputColumn
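// The source column is resolved by checking "columns" first, then "add_columns",
// and finally falling back to the output column's own name.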
final HashMap<Column, Column> outputInputColumnMap = new HashMap<Column, Column>();
for (Column outputColumn : outputSchema.getColumns()) {
String name = outputColumn.getName();
String srcName = getSrc(name, task.getColumns());
if (srcName == null) {
srcName = getSrc(name, task.getAddColumns());
}
if (srcName == null) {
srcName = name;
}
Column inputColumn = getColumn(srcName, inputSchema);
outputInputColumnMap.put(outputColumn, inputColumn); // NOTE: inputColumn may be null if no matching input column exists
}
// Map outputColumn => default value if present
final HashMap<Column, Object> outputDefaultMap = new HashMap<Column, Object>();
for (Column outputColumn : outputSchema.getColumns()) {
String name = outputColumn.getName();
Type type = outputColumn.getType();
Object defaultValue = getDefault(name, type, task.getColumns(), task);
if (defaultValue == null) {
defaultValue = getDefault(name, type, task.getAddColumns(), task);
}
if (defaultValue != null) {
outputDefaultMap.put(outputColumn, defaultValue);
}
}
return new PageOutput() {
private PageReader pageReader = new PageReader(inputSchema);
private PageBuilder pageBuilder = new PageBuilder(Exec.getBufferAllocator(), outputSchema, output);
private ColumnVisitorImpl visitor = new ColumnVisitorImpl(pageBuilder);
@Override
public void finish()
{
pageBuilder.finish();
}
@Override
public void close()
{
pageBuilder.close();
}
@Override
public void add(Page page)
{
pageReader.setPage(page);
while (pageReader.nextRecord()) {
outputSchema.visitColumns(visitor);
pageBuilder.addRecord();
}
}
class ColumnVisitorImpl implements ColumnVisitor
{
private final PageBuilder pageBuilder;
ColumnVisitorImpl(PageBuilder pageBuilder)
{
this.pageBuilder = pageBuilder;
}
@Override
public void booleanColumn(Column outputColumn)
{
Column inputColumn = outputInputColumnMap.get(outputColumn);
if (inputColumn == null || pageReader.isNull(inputColumn)) {
Boolean defaultValue = (Boolean) outputDefaultMap.get(outputColumn);
if (defaultValue != null) {
pageBuilder.setBoolean(outputColumn, defaultValue.booleanValue());
}
else {
pageBuilder.setNull(outputColumn);
}
}
else {
pageBuilder.setBoolean(outputColumn, pageReader.getBoolean(inputColumn));
}
}
@Override
public void longColumn(Column outputColumn)
{
Column inputColumn = outputInputColumnMap.get(outputColumn);
if (inputColumn == null || pageReader.isNull(inputColumn)) {
Long defaultValue = (Long) outputDefaultMap.get(outputColumn);
if (defaultValue != null) {
pageBuilder.setLong(outputColumn, defaultValue.longValue());
}
else {
pageBuilder.setNull(outputColumn);
}
}
else {
pageBuilder.setLong(outputColumn, pageReader.getLong(inputColumn));
}
}
@Override
public void doubleColumn(Column outputColumn)
{
Column inputColumn = outputInputColumnMap.get(outputColumn);
if (inputColumn == null || pageReader.isNull(inputColumn)) {
Double defaultValue = (Double) outputDefaultMap.get(outputColumn);
if (defaultValue != null) {
pageBuilder.setDouble(outputColumn, defaultValue.doubleValue());
}
else {
pageBuilder.setNull(outputColumn);
}
}
else {
pageBuilder.setDouble(outputColumn, pageReader.getDouble(inputColumn));
}
}
@Override
public void stringColumn(Column outputColumn)
{
Column inputColumn = outputInputColumnMap.get(outputColumn);
if (inputColumn == null || pageReader.isNull(inputColumn)) {
String defaultValue = (String) outputDefaultMap.get(outputColumn);
if (defaultValue != null) {
pageBuilder.setString(outputColumn, defaultValue);
}
else {
pageBuilder.setNull(outputColumn);
}
}
else {
pageBuilder.setString(outputColumn, pageReader.getString(inputColumn));
}
}
@Override
public void jsonColumn(Column outputColumn)
{
Column inputColumn = outputInputColumnMap.get(outputColumn);
if (inputColumn == null || pageReader.isNull(inputColumn)) {
Value defaultValue = (Value) outputDefaultMap.get(outputColumn);
if (defaultValue != null) {
pageBuilder.setJson(outputColumn, defaultValue);
}
else {
pageBuilder.setNull(outputColumn);
}
}
else {
pageBuilder.setJson(outputColumn, pageReader.getJson(inputColumn));
}
}
@Override
public void timestampColumn(Column outputColumn)
{
Column inputColumn = outputInputColumnMap.get(outputColumn);
if (inputColumn == null || pageReader.isNull(inputColumn)) {
Timestamp defaultValue = (Timestamp) outputDefaultMap.get(outputColumn);
if (defaultValue != null) {
pageBuilder.setTimestamp(outputColumn, defaultValue);
}
else {
pageBuilder.setNull(outputColumn);
}
}
else {
pageBuilder.setTimestamp(outputColumn, pageReader.getTimestamp(inputColumn));
}
}
}
};
}
}
|