answer
stringlengths 17
10.2M
|
|---|
package corejava.streams;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.stream.Stream;
public class FileStreams {

    /** Word list used by all three demo pipelines. */
    private static final String pathToRead = "/usr/share/dict/words";

    /**
     * Runs three stream pipelines over the word file: maximum word length,
     * average word length, and the words whose letters are all distinct.
     * A Stream is single-use, so each pipeline gets its own stream; each is
     * opened in try-with-resources so the underlying file handle is released
     * (the original leaked the readers — {@code BufferedReader.lines()} does
     * not close the reader when the stream is exhausted).
     */
    public static void main(String[] args) {
        try (Stream<String> forMax = getStreamFromFile(pathToRead);
             Stream<String> forAverage = getStreamFromFile(pathToRead);
             Stream<String> forUnique = getStreamFromFile(pathToRead)) {
            findMaxLength(forMax);
            findAverageLength(forAverage);
            findWordsWithOnlyUniqueLetters(forUnique);
        } catch (IOException e) { // FileNotFoundException is an IOException
            System.out.println(e.getMessage());
        }
    }

    /**
     * Opens the file as a stream of lines, decoded as UTF-8 (explicit charset
     * instead of the platform default). The returned stream owns the file
     * handle: closing the stream closes the file.
     *
     * @param filePath path of the text file to read
     * @return a lazily-populated stream of the file's lines
     * @throws IOException if the file cannot be opened
     */
    private static Stream<String> getStreamFromFile(String filePath) throws IOException {
        return Files.lines(Paths.get(filePath), StandardCharsets.UTF_8);
    }

    /** Prints every line of the stream whose characters are all distinct. */
    private static void findWordsWithOnlyUniqueLetters(Stream<String> linesStream) {
        linesStream.filter(FileStreams::filterDistinctWords).forEach(System.out::println);
    }

    /** Prints the average line length, if the stream is non-empty. */
    private static void findAverageLength(Stream<String> linesStream) {
        linesStream.mapToInt(String::length).average().ifPresent(System.out::println);
    }

    /** Prints the maximum line length, if the stream is non-empty. */
    private static void findMaxLength(Stream<String> linesStream) {
        linesStream.mapToInt(String::length).max().ifPresent(System.out::println);
    }

    /**
     * Returns true when no character occurs more than once in {@code str}
     * (the empty string trivially qualifies). Widened from private so the
     * predicate can be reused and unit-tested; behavior is unchanged.
     *
     * @param str the word to check (must not be null)
     * @return true if every character appears exactly once
     */
    public static boolean filterDistinctWords(String str) {
        for (char ch : str.toCharArray()) {
            // A repeated character has distinct first and last positions.
            if (str.indexOf(ch) != str.lastIndexOf(ch)) {
                return false;
            }
        }
        return true;
    }
}
|
package dr.app.tools;
import dr.app.beauti.util.XMLWriter;
import dr.evolution.io.NewickImporter;
import dr.evolution.tree.FlexibleNode;
import dr.evolution.tree.Tree;
import dr.evolution.util.Taxa;
import dr.evolution.util.Taxon;
import java.io.*;
import java.text.DecimalFormat;
/**
* @author Alexei Drummond
* @author Walter Xie
*/
public class GetDateFromTree extends NewickImporter {
/**
 * Creates an importer that reads Newick trees from the given reader
 * (used by main() to iterate over a whole .trees file).
 */
public GetDateFromTree(Reader reader) {
    super(reader);
}
/**
 * Creates an importer that parses a single in-memory Newick string
 * (only referenced from commented-out code in main()).
 */
public GetDateFromTree(String treeString) {
    super(treeString);
}
/**
 * Reads the posterior tree sample from {@code inputFileName} and, for every
 * 1000th tree (totalTrees == index * 100 with index stepping by 10), prints
 * its tip dates / root height and writes a BEAST XML file via getDate().
 *
 * Fix over the original: the FileReader is now closed in a finally block, so
 * it is no longer leaked when an ImportException aborts the loop early.
 */
public static void main(String[] args) {
    int index = 10;
    FileReader fileReader = null;
    try {
        System.out.println(inputFileName + "\n\n");
        fileReader = new FileReader(inputFileName);
        GetDateFromTree getDateFromTree = new GetDateFromTree(fileReader); // many trees
        int totalTrees = 0;
        try {
            while (getDateFromTree.hasTree()) {
                Tree tree = getDateFromTree.importNextTree();
                // Sample every 1000th tree: 1000, 2000, 3000, ...
                if (totalTrees == index * 100) {
                    System.out.println(totalTrees);
                    getDate(index, tree);
                    index += 10;
                }
                totalTrees++;
            }
        } catch (ImportException e) {
            System.err.println("Error Parsing Input Tree: " + e.getMessage());
            return;
        }
        System.out.println(totalTrees);
    } catch (FileNotFoundException e) {
        System.err.println("Input file not found: " + inputFileName);
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Always release the file handle, even on the early-return error path.
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException ignored) {
                // nothing useful to do on close failure
            }
        }
    }
}
private static void getDate(int index, Tree tree) { // many trees
double[] tips = new double[tree.getExternalNodeCount() + 1];
double rootHeight;
System.out.println(tree);
for (int i = 0; i < tree.getTaxonCount(); i++) {
FlexibleNode node = (FlexibleNode) tree.getExternalNode(i);
// System.out.println(node.getTaxon() + " has " + node.getHeight());
tips[Integer.parseInt(node.getTaxon().getId())] = node.getHeight();
}
rootHeight = ((FlexibleNode) tree.getRoot()).getHeight();
System.out.println("tree " + index + " root height = " + rootHeight);
System.out.println("\n");
if (index < 0) {
printXML(tips);
} else {
DecimalFormat twoDForm = new DecimalFormat("
try {
outputXML(index, tips, Double.valueOf(twoDForm.format(rootHeight + 1.0)), tree.toString());
} catch (IOException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
}
System.out.println("\n");
}
private static void getDate(int index, GetDateFromTree getDateFromTree, String treeString) { // single tree or import trees
Taxa taxa = new Taxa();
for (int n = 1; n <= 100; n++) {
Taxon t = new Taxon(Integer.toString(n));
taxa.addTaxon(t);
}
double[] tips = new double[taxa.getTaxonCount() + 1];
double rootHeight;
try {
Tree tree = getDateFromTree.importTree(taxa);
System.out.println(tree);
for (int i = 0; i < tree.getTaxonCount(); i++) {
FlexibleNode node = (FlexibleNode) tree.getExternalNode(i);
// System.out.println(node.getTaxon() + " has " + node.getHeight());
tips[Integer.parseInt(node.getTaxon().getId())] = node.getHeight();
}
rootHeight = ((FlexibleNode) tree.getRoot()).getHeight();
System.out.println("tree " + index + " root height = " + rootHeight);
System.out.println("\n");
} catch (ImportException e) {
System.err.println("Error Parsing Input Tree: " + e.getMessage());
return;
} catch (IOException e) {
System.err.println("Error Parsing Input Tree: " + e.getMessage());
return;
}
if (index < 0) {
printXML(tips);
} else {
DecimalFormat twoDForm = new DecimalFormat("
try {
outputXML(index, tips, Double.valueOf(twoDForm.format(rootHeight + 1.0)), treeString);
} catch (IOException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
}
System.out.println("\n");
}
/**
 * Writes a complete BEAST XML input file ("T&lt;curD&gt;_&lt;index&gt;.xml") for one
 * tree: a &lt;taxa&gt; block with tip dates taken from {@code tips}, a
 * birth-death serial-sampling (BDSS) tree prior, an R0 RPN calculator, the
 * supplied Newick string as the fixed starting tree, the scale operators,
 * and the MCMC / logging configuration.
 *
 * @param index      tree index; appears in the XML, .log and .trees file names
 * @param tips       tip dates indexed by numeric taxon id (slot 0 unused)
 * @param orign      [sic: "origin"] value written for the bdss.origin parameter
 * @param treeString Newick tree written verbatim as &lt;newick id="startingTree"&gt;
 * @throws IOException if the output file cannot be written
 */
private static void outputXML(int index, double[] tips, double orign, String treeString) throws IOException {
    // Output path: <path>/<curD>/T<curD>_<index>.xml (see the static fields of this class).
    String f = path + curD + "/T" + curD + "_" + Integer.toString(index) + ".xml";
    System.out.println(f);
    // NOTE(review): the writer is not closed if a write fails part-way; the
    // IOException simply propagates to the caller and the handle leaks.
    XMLWriter w = new XMLWriter(new BufferedWriter(new FileWriter(new File(f))));
    // XML prolog, banner comments and the opening of the taxa list.
    w.writeText("<?xml version=\"1.0\" standalone=\"yes\"?>\n" + "\n" +
    "<!-- Generated by BEAUTi v1.7.0 Prerelease r3910 -->\n" +
    "<!-- by Alexei J. Drummond and Andrew Rambaut -->\n" +
    "<!-- Department of Computer Science, University of Auckland and -->\n" +
    "<!-- Institute of Evolutionary Biology, University of Edinburgh -->\n" +
    "\n" +
    "<beast>\n" +
    "\n" +
    "\t<!-- The list of taxa to be analysed (can also include dates/ages). -->\n" +
    "\t<!-- ntax=100 -->\n" +
    "\t<taxa id=\"taxa\">\n");
    // One <taxon> per tip; ids are 1-based, slot 0 of tips is unused.
    for (int n = 1; n < tips.length; n++) {
        w.writeText("\t<taxon id=\"" + n + "\">\n" +
        "\t\t<date value=\"" + tips[n] + "\" direction=\"backwards\" units=\"years\" />\n" +
        "\t</taxon>\n");
    }
    w.writeText("\t</taxa>\n");
    // BDSS tree prior with fixed starting values; orign fills bdss.origin.
    w.writeText("\t<!-- Stadler et al (2011) : Estimating the basic reproductive number from viral sequence data, Submitted.-->\n" +
    "\t<birthDeathSerialSampling id=\"bdss\" units=\"substitutions\">\n" +
    "\t\t<birthRate>\n" +
    "\t\t\t<parameter id=\"bdss.birthRate\" value=\"2\" lower=\"0.0\" upper=\"Infinity\"/>\n" +
    "\t\t</birthRate>\n" +
    "\t\t<relativeDeathRate>\n" +
    "\t\t\t<parameter id=\"bdss.relativeDeathRate\" value=\"0.5\" lower=\"0.0\" upper=\"Infinity\"/>\n" +
    "\t\t</relativeDeathRate>\n" +
    "\t\t<sampleProbability>\n" +
    "\t\t\t<parameter id=\"bdss.sampleProbability\" value=\"0.01\" lower=\"0.0\" upper=\"1.0\"/>\n" +
    "\t\t</sampleProbability>\n" +
    "\t\t<psi>\n" +
    "\t\t\t<parameter id=\"bdss.psi\" value=\"0.5\" lower=\"0.0\" upper=\"Infinity\"/>\n" +
    "\t\t</psi>\n" +
    "\t\t<origin>\n" +
    "\t\t\t<parameter id=\"bdss.origin\" value=\"" + orign + "\" lower=\"0.0\" upper=\"Infinity\"/>\n" +
    "\t\t</origin>\n" +
    "\t\t<sampledRemainInfectiousProb>\n" +
    "\t\t\t<parameter id=\"bdss.sampledRemainInfectiousProb\" value=\"0.0\" lower=\"0.0\" upper=\"1.0\"/>\n" +
    "\t\t</sampledRemainInfectiousProb>\n" +
    "\t\t<finalTimeInterval>\n" +
    "\t\t\t<parameter id=\"bdss.finalTimeInterval\" value=\"0.1\" lower=\"0.0\" upper=\"Infinity\"/>\n" +
    "\t\t</finalTimeInterval>\n" +
    "\t</birthDeathSerialSampling>\n");
    // R0 computed from the BDSS parameters via a postfix (RPN) expression.
    w.writeText("\t<RPNcalculator id=\"R0\">\n" +
    "\t\t<variable name=\"b\">\n" +
    "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n" +
    "\t\t</variable>\n" +
    "\t\t<variable name=\"d\">\n" +
    "\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n" +
    "\t\t</variable>\n" +
    "\t\t<variable name=\"s\">\n" +
    "\t\t\t<parameter idref=\"bdss.psi\"/>\n" +
    "\t\t</variable>\n" +
    "\t\t<variable name=\"1_r\">\n" +
    "\t\t\t<parameter idref=\"bdss.sampledRemainInfectiousProb\"/>\n" +
    "\t\t</variable>\n" +
    "\t\t<expression name=\"R0\">\n" +
    "\t\t\tb b d * s 1 1_r - * + /\n" +
    "\t\t</expression>\n" +
    "\t</RPNcalculator>\n");
    // Constant-size demographic (only used to seed the chain) and the start
    // of the fixed starting tree element.
    w.writeText("\n" +
    "\t<!-- This is a simple constant population size coalescent model -->\n" +
    "\t<!-- that is used to generate an initial tree for the chain. -->\n" +
    "\t<constantSize id=\"initialDemo\" units=\"substitutions\">\n" +
    "\t\t<populationSize>\n" +
    "\t\t\t<parameter id=\"initialDemo.popSize\" value=\"100.0\"/>\n" +
    "\t\t</populationSize>\n" +
    "\t</constantSize>\n" +
    "\n" +
    "\t<!-- Generate a random starting tree under the coalescent process -->\n" +
    "\t<newick id=\"startingTree\">\n");
    w.flush();
    // The Newick string itself is written raw (write, not writeText).
    w.write(treeString);
    w.flush();
    // Tree model wrapping the starting tree, speciation likelihood, operators.
    w.writeText("\n" + "\t</newick>\n" +
    "\n" +
    "\t<!-- Generate a tree model -->\n" +
    "\t<treeModel id=\"treeModel\">\n" +
    "\t\t<coalescentTree idref=\"startingTree\"/>\n" +
    "\t\t<rootHeight>\n" +
    "\t\t\t<parameter id=\"treeModel.rootHeight\"/>\n" +
    "\t\t</rootHeight>\n" +
    "\t\t<nodeHeights internalNodes=\"true\">\n" +
    "\t\t\t<parameter id=\"treeModel.internalNodeHeights\"/>\n" +
    "\t\t</nodeHeights>\n" +
    "\t\t<nodeHeights internalNodes=\"true\" rootNode=\"true\">\n" +
    "\t\t\t<parameter id=\"treeModel.allInternalNodeHeights\"/>\n" +
    "\t\t</nodeHeights>\n" +
    "\n" +
    "\t\t<!-- END Tip date sampling -->\n" +
    "\t</treeModel>\n" +
    "\t\n" +
    "\t<!-- Generate a speciation likelihood for Yule or Birth Death -->\n" +
    "\t<speciationLikelihood id=\"speciation\">\n" +
    "\t\t<model>\n" +
    "\t\t\t<birthDeathSerialSampling idref=\"bdss\"/>\n" +
    "\t\t</model>\n" +
    "\t\t<speciesTree>\n" +
    "\t\t\t<treeModel idref=\"treeModel\"/>\n" +
    "\t\t</speciesTree>\n" +
    "\t</speciationLikelihood>\n" +
    "\n" +
    "\t<!-- Define operators -->\n" +
    "\t<operators id=\"operators\">\n" +
    "\t\t\n" +
    "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n" +
    "\t\t\t<parameter idref=\"bdss.origin\"/>\n" +
    "\t\t</scaleOperator>\n" +
    "\t\t\n" +
    "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n" +
    "\t\t\t<parameter idref=\"bdss.psi\"/>\n" +
    "\t\t</scaleOperator>\n" +
    "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n" +
    "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n" +
    "\t\t</scaleOperator>\n" +
    "\t\t<scaleOperator scaleFactor=\"0.75\" weight=\"10\">\n" +
    "\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n" +
    "\t\t</scaleOperator>\n" +
    "\n" +
    "\t</operators>");
    w.flush();
    // MCMC block: posterior = priors on the BDSS parameters x speciation likelihood.
    w.writeText("\n" +
    "\t<!-- Define MCMC -->\n" +
    "\t<mcmc id=\"mcmc\" chainLength=\"20000000\" autoOptimize=\"true\">\n" +
    "\t\t<posterior id=\"posterior\">\n" +
    "\t\t\t<prior id=\"prior\">\n" +
    "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"100000.0\">\n" +
    "\t\t\t\t\t<parameter idref=\"bdss.birthRate\"/>\n" +
    "\t\t\t\t</uniformPrior>\n" +
    "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1.0\">\n" +
    "\t\t\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n" +
    "\t\t\t\t</uniformPrior>\n" +
    "\t\t\n" +
    "\t\t\t\t<betaPrior shape=\"1.0\" shapeB=\"1.0\" offset=\"0.0\">\n" +
    "\t\t\t\t\t<parameter idref=\"bdss.sampleProbability\"/>\n" +
    "\t\t\t\t</betaPrior>\n" +
    "\t\t\t\t\n" +
    "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1.7976931348623157E308\">\n" +
    "\t\t\t\t\t<parameter idref=\"bdss.origin\"/>\n" +
    "\t\t\t\t</uniformPrior>\n" +
    "\t\t\t\t\n" +
    "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"1000.0\">\n" +
    "\t\t\t\t\t<parameter idref=\"bdss.finalTimeInterval\"/>\n" +
    "\t\t\t\t</uniformPrior>\n" +
    "\t\t\t\t<uniformPrior lower=\"0.0\" upper=\"100.0\">\n" +
    "\t\t\t\t\t<parameter idref=\"bdss.psi\"/>\n" +
    "\t\t\t\t</uniformPrior>\n" +
    "\t\t\t\t\n" +
    "\t\t\t</prior>\n" +
    "\t\t\t<likelihood id=\"likelihood\">\n" +
    "\t\t\t\t<speciationLikelihood idref=\"speciation\"/>\n" +
    "\t\t\t</likelihood>\n" +
    "\t\t</posterior>\n" +
    "\t\t<operators idref=\"operators\"/>\n");
    w.flush();
    // Screen log, file log and tree log (file names embed curD and index),
    // then the timer report and the closing </beast> tag.
    w.writeText("\n" +
    "\t\t<!-- write log to screen -->\n" +
    "\t\t<log id=\"screenLog\" logEvery=\"200000\">\n" +
    "\t\t\t<column label=\"Posterior\" dp=\"4\" width=\"12\">\n" +
    "\t\t\t\t<posterior idref=\"posterior\"/>\n" +
    "\t\t\t</column>\n" +
    "\t\t\t<column label=\"Prior\" dp=\"4\" width=\"12\">\n" +
    "\t\t\t\t<prior idref=\"prior\"/>\n" +
    "\t\t\t</column>\n" +
    "\t\t\t<column label=\"Likelihood\" dp=\"4\" width=\"12\">\n" +
    "\t\t\t\t<likelihood idref=\"likelihood\"/>\n" +
    "\t\t\t</column>\n" +
    "\t\t\t<column label=\"rootHeight\" sf=\"6\" width=\"12\">\n" +
    "\t\t\t\t<parameter idref=\"treeModel.rootHeight\"/>\n" +
    "\t\t\t</column>\n" +
    "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.psi\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.sampledRemainInfectiousProb\"/>\n" +
    "\t\t\t<RPNcalculator idref=\"R0\"/>\n" +
    "\t\t\t\n" +
    "\t\t</log>\n" +
    "\n" +
    "\t\t<!-- write log to file -->\n" +
    "\t\t<log id=\"fileLog\" logEvery=\"2000\" fileName=\"T" + curD + "_" + Integer.toString(index) + ".log\" overwrite=\"false\">\n" +
    "\t\t\t<posterior idref=\"posterior\"/>\n" +
    "\t\t\t<prior idref=\"prior\"/>\n" +
    "\t\t\t<likelihood idref=\"likelihood\"/>\n" +
    "\t\t\t<parameter idref=\"treeModel.rootHeight\"/>\n" +
    "\n" +
    "\t\t\t<parameter idref=\"bdss.birthRate\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.relativeDeathRate\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.sampleProbability\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.psi\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.origin\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.sampledRemainInfectiousProb\"/>\n" +
    "\t\t\t<parameter idref=\"bdss.finalTimeInterval\"/>\n" +
    "\t\t\t<RPNcalculator idref=\"R0\"/>\n" +
    "\t\t\t\n" +
    "\n" +
    "\t\t\t<!-- END Tip date sampling -->\n" +
    "\t\t\t<speciationLikelihood idref=\"speciation\"/>\n" +
    "\t\t</log>\n" +
    "\n" +
    "\t\t<!-- write tree log to file -->\n" +
    "\t\t<logTree id=\"treeFileLog\" logEvery=\"2000\" nexusFormat=\"true\" fileName=\"T" + curD + "_" + Integer.toString(index) + ".trees\" sortTranslationTable=\"true\">\n" +
    "\t\t\t<treeModel idref=\"treeModel\"/>\n" +
    "\t\t\t<posterior idref=\"posterior\"/>\n" +
    "\t\t</logTree>\n" +
    "\t</mcmc>\n" +
    "\t<report>\n" +
    "\t\t<property name=\"timer\">\n" +
    "\t\t\t<mcmc idref=\"mcmc\"/>\n" +
    "\t\t</property>\n" +
    "\t</report>\n" +
    "</beast>\n");
    w.flush();
    w.close();
}
/**
 * Dumps the &lt;taxa&gt; block (one dated &lt;taxon&gt; element per tip) to
 * standard output instead of writing a full XML file; used when the caller
 * passes a negative index.
 *
 * @param tips tip dates indexed by numeric taxon id; slot 0 is unused
 */
private static void printXML(double[] tips) {
    System.out.println("\t<taxa id=\"taxa\">");
    int taxonId = 1; // taxon ids are 1-based, so slot 0 of tips is skipped
    while (taxonId < tips.length) {
        System.out.println("\t<taxon id=\"" + taxonId + "\">");
        System.out.println("\t\t<date value=\"" + tips[taxonId] + "\" direction=\"backwards\" units=\"years\" />");
        System.out.println("\t</taxon>");
        taxonId++;
    }
    System.out.println("\t</taxa>");
}
// static final String testingTree = "((1:1,2:1):1,(3:0.25,4:0.75):1);";
// static final String treeImported = "((3:0.3824976747708124,94:3.401966643328916):0.2131455444432433,((63:0.7035650391093435,(((61:0.2412493040961774,(((69:0.8620006828340152,((48:0.5302937984554452,((66:0.49360327162170314,96:0.7203824406192432):0.26805517248648547,(84:1.6647723439430746,18:1.7899504518955753):8.032342341470766E-4):0.3878430419933654):0.0020541955766213427,(((39:0.547175618789725,41:0.7713476736692841):1.5199644442202225,59:2.300348903048058):0.3545780299502965,(5:2.173657685837823,(7:0.4831518556113801,46:0.04259431830686622):0.9936840610288644):0.3828314786413345):0.06944108321445075):0.2916463175592021):0.8511296243775233,(((13:0.21179557713185626,65:0.9050599247224358):1.2063337719436882,(19:0.961325550099684,(71:0.27227635823039753,((72:0.3623541449244776,34:1.1593527329427098):0.4329707059057515,24:1.750349112100648):0.8548087225791183):0.33826729136137956):0.014057115381393093):0.6343929119615161,99:3.7025837488577316):0.543866697708435):0.1836689943905121,((((((82:1.7276842975724649,91:1.607723421568065):0.1933685581812319,((28:0.23706203140931148,((86:0.5529558273083699,67:0.29663788595939145):0.281373008673687,31:0.8632750147330384):0.5341346272330105):0.7280205172816232,73:0.11489207430514847):0.1042334879532878):0.22023308807550723,97:2.0816724934432385):0.16356342886852548,(25:0.85596627446141,81:0.10864654588741773):0.3265316815053567):0.7052085780791693,((((54:1.2513925633251677,15:1.7216422570576408):1.1052980890372737,79:1.4025557656843413):0.4324573365468609,((60:1.2665130997995677,(23:0.9699990116584232,(58:0.17796506940081697,27:0.3614456732505629):1.3967642598709824):0.4421690408682921):0.7888785083571337,((76:0.1711906024989267,49:1.2710678644023476):0.060131414497704094,64:1.2593916884637246):1.5454624508194899):0.24056330854342534):0.015283090096886554,((32:1.3278266797752578,50:1.2629774321495602):0.7777520083215703,88:0.02772431983654222):1.1101361895232151):0.5963623233265598):0.3665009131043293,((((16:1.930900397028787
5,(((90:0.5445216783903744,6:0.8548831149642158):0.2741249294450656,30:0.008362111012826023):1.022234909613422,(40:0.7366132749777619,83:0.5070914524648216):1.3875613903422281):0.5001700205337869):0.7233479987224163,((1:0.6003333838940725,98:0.3415505496586113):2.588962722413393,(4:0.2759376488056353,29:0.47002294631420005):0.898552467271085):0.2362157981780597):0.11672800780799664,((10:1.1355322499789322,43:0.5843151524612831):0.701686409717148,52:1.9374978201794926):1.417945520858075):0.7586500833008447,9:0.3178627566272416):0.03155457313484966):0.14868731051955386):0.3667216608428072):0.02390814464028157,(((((35:1.086623144974067,74:0.692186651442158):0.10673574135114072,(78:0.1638180965937629,89:0.250663800794962):0.08265189041229482):1.4629757070659404,93:1.6063646891197325):1.4550877544464176,((37:0.9226166936844796,21:1.044343709701885):1.5676034487217074,100:1.3447223957080423):1.088773395481709):0.08284086233381505,((((56:2.355869340652318,45:0.4032979506669325):0.3330320301280212,((2:0.5357734051244822,53:1.0622382154097905):1.0714020295534217,(57:1.9553678171881557,92:1.9351676011453818):0.3176309095081522):0.33889485298905964):1.0910719751499318,51:0.06358005997870864):0.24119223678720436,(12:1.0464825190142348,8:1.0984225849334481):1.6150168196252994):0.13538891424735233):0.654056534527701):0.68828126623945,((((95:1.5084591517564723,87:1.0618807586778019):0.13101510403314265,75:1.1323197425395333):2.2878458817626433,36:2.864052872624789):0.6509399099742286,(((17:0.06524692359560014,((((33:0.017056678783653467,(77:0.768977532338945,22:0.2066823108679694):0.3345642602125296):0.600193544474839,47:0.6997906421703952):0.5100738160877549,(70:0.5677418730833756,42:1.9551556564280363):1.6169469279243813):0.3307075738681533,20:2.492040542806981):0.525443622448492):0.09433286051003886,((11:0.3113555135611077,(38:0.44389504903251376,44:1.2708469141489145):0.2622045045057788):1.2732398364643553,(26:1.6716254866488542,(55:0.9828398772396151,68:0.21551121655825067):1
.106998506688205):0.5526554864260289):1.671928572660197):0.12614845672773534,(80:0.34195834822687576,(85:0.09264904116899952,62:1.713709090294202):0.18592099291105324):0.4792020544951763):0.2727014597498254):0.2650194727743962):2.4225764992771586):0.12072071192465827,14:0.47972787703636754):2.757605090929111);";
// T2 sampledRemainInfectiousProb = 1; T1 sampledRemainInfectiousProb = 0
// Base directory of the tree/XML files.
// NOTE(review): hard-coded absolute Windows path -- must be edited per machine/run.
static final String path = "C:/Users/dxie004/Documents/HIV Swiss Cohort study/Tanjia Birth-Death/T32/";
// Tag identifying the current dataset; used both as a sub-directory name and
// in the "T<curD>_<index>" output file names.
static final String curD = "10";
// Input .trees file (posterior tree sample) read by main().
static final String inputFileName = path + curD + "/T32.trees";
// static final String[] trees = {
// "(((100:0.2759295941079598,((6:0.1780547126475307,90:0.2773585176913773):0.4324798358805104,(77:0.4366037591568581,(((60:0.06114593641368682,(((45:1.1858532193532096,69:0.4857462933631136):1.0703793675170519,14:1.0080965286106625):0.7531830236235892,(((72:0.4734480373733323,13:1.6414737257868168):0.31345510548895206,9:1.2799154503620205):0.36580160160488395,55:0.6521307508046301):0.13222292659177048):0.6843395008777939):0.08358823875237409,(63:0.1173962764017622,(((((83:0.06388497493919498,(87:0.06971104871286249,96:0.11513746603637598):0.2617677248167065):0.23523100742624647,11:2.5488642344318637):0.5709241701969603,(((59:0.13001207544150828,4:0.534509372507409):0.9065013929723875,(((78:0.4300613180395787,(54:0.5881259163967514,37:0.09161743362459873):0.10007317973276297):0.3581939195808168,71:0.8417847156069083):0.6760435746540918,56:0.34728849185761557):0.0011569801790649592):1.6147112046099765,(98:1.8505753493501458,17:0.9280241884545952):1.5062115835297483):0.6173650351656441):0.0175798939167926,40:1.4327849159224866):0.6484216994792886,42:0.5649331356339093):0.044490735903595535):0.10065007060014697):0.7480152893310459,((19:1.0453148233101777,65:0.03982690649499432):0.015852971398985005,((((((((24:0.6426142775167778,49:0.26507022798260027):0.7090057146513142,((41:0.14186057102002847,67:0.026961796503665925):0.36991918712876304,79:0.5815959657493615):1.024603334843216):0.024183855106581387,86:0.08436576474963875):0.3946779683409196,((61:0.21049623087835378,(16:0.22329709973960155,93:0.0980200763866037):0.11081431464717495):0.8467610850802538,31:1.2165550002603938):0.06206931566212637):0.7024059789204573,((18:0.5110908536902983,34:0.21183915417614707):0.882488365662339,73:0.23324499594150505):2.0871456603448966):0.44201612676651125,99:0.455825827781966):0.638245486290975,(39:0.07658437481359659,58:0.1764000007473152):0.8470799448815418):0.13252970716211898,53:0.5440963965993131):0.7116321144589479):0.04446103714470162):0.5570925454659523):0.8349620719218072):
0.39537618606560354):0.23246534833540267,((70:0.1996975744937446,((30:0.25043743640342697,((((89:1.6127193503514903,(82:0.14048148155413376,57:1.0784083186686084):0.38780755293423397):0.24644757705303455,80:0.24722619589322203):0.06489043744710532,7:1.6582683265776672):0.4481749866378619,(2:1.8804071294900604,3:0.023035004075473875):0.7446717171494495):0.2814795274087576):1.0069357276627375,((((((50:0.009904531387953774,(51:0.04974396264227465,((28:0.6164291583816088,33:0.7028750326597675):0.020632702991932206,52:0.2620934650458022):1.6169083530976258):0.11188677321436113):0.5897740532249305,((((84:0.00915957993005767,75:1.403360151297104):0.17953794102191267,81:0.0627513917657474):0.37241766310854807,(((94:0.7136713627250614,10:0.008279153277477747):0.11337143032465669,(68:0.9476218248680841,8:0.46104904327810214):0.3082656519911391):1.0277836373882483,35:1.139900635404104):0.3649293864599641):0.04053080400729936,62:0.43372394838795225):0.8450036334702116):0.21676992831331532,97:0.3937210281260244):0.6217923509933945,66:0.3065474310864129):0.8170463207263605,44:0.026894004345598965):0.015539344835022284,((20:0.1252746831931999,12:0.2706972912118957):0.23084681536101925,(((21:0.04108670219976496,((26:0.21683708421460812,(32:0.14412489943602447,(((74:0.18560721020572668,(95:0.362902776402263,((25:0.42464905250257207,1:0.5164820341492313):0.5925646746092141,85:0.13636374873279244):0.0031874038009704897):0.0044990893278598065):0.10197883895896687,(43:0.05393610804603166,46:0.926216914294938):0.1084119847356193):0.34337795022535045,29:0.5918636480714469):1.0448119892689047):0.31409259065885076):0.6919113759361757,91:0.7479793351634823):0.6136793015327289):0.27258213498288786,((((((48:0.1306215018764466,(88:0.8711354400164752,36:0.4491587803517647):0.055997269374821856):0.06583165565941829,27:1.0506149946403638):2.364007453784181,5:0.45338514679037667):0.053644616923241184,92:0.7051063242279012):0.35121217569111796,47:0.5874377988158308):0.3714369733787217,(64:0.03209851
2520678746,15:0.07581713088827158):0.03448907859428907):0.2810624510789914):0.30792407694989077,76:1.1156644058672276):0.39531170115012415):0.08177480035578455):0.5568135150951967):0.14934174255400912):1.1303652342548354,(38:0.2423418362203531,23:0.3173102378974546):0.32247555533508354):0.5340110001664744):0.3446866710113099,22:0.12145978249112854);",
// "(41:0.5917472044147383,((((22:0.004822363771718408,12:0.036453899723426275):0.038192624923388596,85:0.005582055959308274):1.6405071172099777,32:0.6601791552306269):0.9614057600012043,(96:0.3000557138093116,(((71:1.4169394049035824,((((91:0.1370559611415103,21:1.0666517759455028):0.3013823716294357,(16:0.0573837354307003,65:0.01605939620417085):0.7395436085286314):0.39351458733091915,((4:0.5766757437033982,49:0.5924370722126595):0.6830223876929804,(63:0.44544320701755735,((78:0.5356237019093407,69:0.39872973970167014):0.22658400724539007,2:0.007295231526302626):0.2619316314185782):0.12247535755605843):0.4613297883047476):0.22135415594459662,((66:0.0909903756180076,79:0.23944532548434083):0.9326451214609349,77:0.08611690710177422):0.4928995981351125):0.35086338774172576):0.42888176198105565,25:0.22372954562462066):1.6561895153049986,((8:0.7252985413015516,(((((19:0.5731481928179369,38:0.5628148179977828):0.7280619924310128,(((51:0.774714515779841,(20:0.30259361812520247,(((45:0.22547818679459386,(15:0.11155583153541332,89:0.0019180047209065854):1.2608754603900336):0.20066176408596847,5:0.011746941347904327):0.1258811004867788,99:0.005171249553354684):0.2856988544773096):0.5919984583930877):0.6680436985029119,62:0.4929500980465096):0.3407459001243027,(56:0.16112858782823203,((((86:0.9999158037021223,(((((6:0.3124148501489347,9:0.39002673818040207):0.15232159499193254,42:0.025343389649686188):0.15783625157397607,(52:0.08252282465787572,1:0.1903249012293786):0.8355165755561378):0.8456377495528147,(29:0.09306183195060469,(36:0.5121393333926862,61:0.6151895034113255):0.13125039698954954):0.36730220486517884):0.22509374921401326,((70:0.548251196265543,(54:1.068022441103647,98:0.3918098219375371):0.17015240619493):0.13477388544955327,76:1.32875931188856):0.5573870865863535):0.23948185129769595):0.660980035309358,((50:0.3183515122501228,43:0.4330103644785115):0.7161229906550692,((46:0.6982485369753215,28:0.35914233093703896):0.9456063575498628,((60:0.15152705573659953,88:
0.9471691368291206):0.006131445480609177,57:1.2055930837668711):0.1168845181106466):0.2821811539741428):0.28658450213926434):0.12231958355958028,100:0.16886198799975904):0.39432418425509486,(((35:0.5476378565836209,(((18:0.225425185335262,33:0.08978391582746603):0.006197436593837091,(84:0.23753266096590897,47:0.054635169404989026):0.22815199471594472):0.040090186625441504,((68:0.5202211551347012,72:0.15964331441430202):0.411489869130763,(55:0.20859691653340207,73:0.13553593601346248):0.13666515596971207):0.006818326385002571):0.23567377464165418):0.4714343657989333,48:1.5927409626470213):0.3766517469902353,(67:0.12673772400627192,7:1.1462220199153317):0.974055300069147):1.1623642735878006):1.5495037998203287E-4):0.18315690644391847):0.04635664312022536):0.07334039689344474,59:0.22350091373092962):0.03682424644454185,(((80:0.05236949965089277,94:0.23241336373873311):0.9433465766797506,30:0.037619487047705125):0.14754844432500747,(((64:0.27544712472171096,17:0.32206323453342955):0.10492373292922874,(3:0.04182901501472891,((34:1.2737727909122611,11:0.04061530717404138):0.6145476708779154,58:0.4772657820579229):0.5726791297976788):0.5339313711611564):0.1932764110884433,(53:0.2898114246653787,((23:0.45593536930150247,82:0.8656995404468664):0.3872574897975163,((((90:0.38032240461584155,13:0.47798888209289925):0.17639524849569743,27:0.197037289443029):0.16238512947848016,74:0.026071102004512126):1.3140863224468318,((24:2.258874030528173,92:1.6095849857326585):0.029794673691030393,14:0.502284744602526):0.32014871387072485):0.4332773701450061):0.07368159395796603):0.4255757485182081):0.14154533689937532):0.15314283412876462):0.20316950598011063,((81:0.3780202281433547,44:0.010725176953772397):0.09495371670612895,(40:0.12414949153226029,31:0.5324837044517716):0.3872132895991154):0.5652026433796182):0.13878612969561122):0.5163154959983132,(((83:0.010850489430678234,10:0.039730311543561214):0.00451766365623385,26:0.019161324722043016):1.8474257478546705,((93:0.3326316546893904,
75:1.630889359942298):1.3819910060920821,((39:0.49621150559368643,(37:0.44370634124779795,97:0.08068959368103207):0.2850705764937068):0.4555503144811305,(87:0.426393053197235,95:0.6627568262239785):0.5328647905832615):2.6497694981512687):0.41591761250949366):0.34497135700431514):0.008789846631671594):0.9178353805692847):0.6415881181349361):0.4310608834467935);",
// "(((((38:0.20945893103632507,67:0.2545479255397307):0.2222080071554533,(((65:0.31774859025521174,50:0.07620494738921835):0.511670460174015,85:0.03794764374746862):0.13373395694374146,60:0.2915010611112834):0.19292070159626284):0.7510849391983871,(42:0.42855486719470903,(((63:0.4582823839850825,(((27:0.06037136613195582,((86:0.8957230982196371,((((47:1.0876476672004083,79:0.7621070044901401):0.3376276785627432,16:0.13878068280908806):0.20702134127963667,((((((64:0.018276586754216506,(((24:0.5740147182619897,51:0.42639104987822896):0.32421320524260455,(44:0.8803456367835127,((57:1.0683310288125438,(58:0.6862531509910688,(8:0.430013497883464,37:0.14125995581964101):0.599229054012099):0.030301804680573197):0.06292735899974655,22:0.10081717347691099):0.10011244948457043):0.09604306051323697):1.3542410309549537,54:1.133612494118391):0.0030180869682210343):1.108182455849681,(73:0.2511326714839113,59:0.7513681943684358):0.6440442650082128):0.008753391773896002,((((4:0.1407154090142544,87:0.10374014119431707):0.1975453197206185,20:0.3765702595001357):0.5738151926793509,((68:0.975351222203803,(12:0.35922406519377004,(43:0.17987010921679436,26:0.3936334594191574):0.2144702754592993):0.2126942646121558):0.284290661461547,(97:0.670930362000043,29:0.45435268212249835):0.647479266319371):0.2948457500708177):0.3701633192136722,19:0.20758256418923215):1.796434872270015):0.03523791698211065,((55:0.22065938446675304,31:0.3281976310359913):1.1731485794688266,(21:0.9203936215475217,(18:0.9405704959476324,(((89:1.3016359089711877,(46:1.1231719777003286,1:1.2997693575898497):1.2145534332520014):0.1414433717737853,(96:0.9467745613694305,13:0.009091421818136602):0.004117634916118718):0.9480455844029891,80:0.21065355514420192):0.12983898260500748):0.08641438208265573):0.03403368275555607):0.02696801550694916):0.9188605033069219,((49:1.8751475635664305,91:0.23997884222228505):0.971583078731602,(72:1.1079609618600315,2:0.6690219812752303):0.7536702933599404):0.7335134063491546):0.9722705746
091149,71:0.1998659027383649):0.04282118073808494):0.06924540377044774,(70:0.07782821709723997,((23:0.053955970385401564,(((94:0.1259661733564399,((((7:0.4129984244303433,((53:0.3346791590896182,76:0.19703779443461183):1.3006982251854122,(40:0.3199236700753967,5:0.29240810767027714):0.7437410708062011):0.22459654760464365):0.11259202385590905,17:0.11920963478805868):1.138680469035327,56:1.0923247967265817):0.17109242963366134,((34:0.1590251305241952,98:0.5495814898246647):0.4847274065698195,(77:0.42792955513817676,(48:0.8982496932991122,69:0.028245620612876943):0.1953893988437101):0.7234967683583065):0.7995854614340314):0.17078626357896587):0.031742310394827555,28:0.49961022133586086):0.11585793955393253,(61:0.19723668031090558,81:0.22390092119131655):0.16398971737028267):0.5950136595790352):0.2426565942524741,(74:0.39080856473024106,92:0.09304519111921206):0.020287748400023453):0.4905334846330547):0.7895582476861076):0.885610801674324):0.47533228457303167,(10:0.23847259111848018,14:0.3544437659446791):0.26365579922420856):0.19957937389126634):0.8259354324509038,93:1.1113311393599945):1.0403049312127894,100:0.71418665896379):0.18737578322699733):0.9311386748992216,(41:0.30513975257813186,9:0.1369719748629894):0.6601136387806346):0.6190646695272104,15:0.1294129241627786):0.18830155513736813):0.38891731948782216):0.19794438020097438,45:0.5862610066122542):1.0718124455450653,(((36:0.2913177399704825,((90:0.5552880241882718,((32:0.7479380663619146,95:0.47959514427707184):5.233199806653488E-4,78:1.189268612429517):0.10919740376473719):0.7443342957766994,((25:0.14932169558096042,88:0.5232749980187332):0.40645769335340987,(6:0.5073672639081792,(11:0.5186461551353752,((((83:0.03715113773255563,82:0.2614250212766853):2.175681337194349,(52:0.09423981546368676,99:0.2647423126132731):0.6392657973520794):0.22033697732996993,35:0.3131657223274318):0.7309269197135797,((62:0.45652484324192866,(75:0.029001350347898303,30:0.03606563391990392):0.09356574068423384):0.6597114808400275,3
9:0.33057992125784175):0.491794075591081):1.8882986408869158):0.5962601572361033):1.9156514806670417):1.901609813062942):0.6254831082284333):1.0822951207746563,(3:0.9429879121409019,(33:1.5282283228151101,66:0.37795674680008773):1.5778275620696398):0.1932596604934531):0.7484342011537404,84:1.3028298578034718):0.07192939096426088);",
// "(((63:0.5971341772682925,(((32:0.33088442935805706,42:0.9818064820555488):1.2852789040403396,4:0.13844816725331643):0.9050067937827864,((72:0.11417102495455467,((45:1.093769638861756,(9:0.37013162632819674,73:0.6575430958348033):0.4555728093701473):1.4412695815738972,(92:0.4801417763123417,2:0.5887611487368416):0.22590464841751157):0.2646555154818171):0.41874643609900675,(6:0.5010735799172772,88:0.2158783182343491):0.6600982215947955):0.46104736197928986):0.13061301027676242):0.8481434396452228,(((((((((22:0.6522107792498546,93:0.7995169966454267):0.4113143508752384,87:0.6694193558953427):0.5023266366105756,((13:0.31957985443240466,(61:0.09579431210066092,74:0.41647078615105204):0.3152390654347944):0.548201458839291,30:0.05629404375512892):0.23024850383335083):0.21685794963441918,(((3:0.8419301810112902,64:0.42307005820123145):0.039576328944843864,(53:0.21472032439718214,43:0.516016144807318):0.5522246100593633):0.7936702784986178,35:0.21690341488244647):0.05708285546235148):0.6910012684088946,65:0.04950621815533074):0.6568390165260207,((((27:0.8977893879571093,(36:1.3150826865822502,97:0.2909338557526886):0.17749747011945027):0.35303080406711485,85:0.0762044554733925):0.056372912249887275,95:0.9593645074528676):1.1077705071412955,(33:0.25300787069890873,(25:0.328817766496198,(29:0.04569368473045543,(20:0.6500779516561956,58:0.9473702542902976):0.16562370827308825):0.44200373669096393):0.5891576662646738):0.4490136088204424):0.004221652274960519):0.16607731991498742,12:0.7120751581749354):0.06529281018453803,(70:1.029108169555296,((15:0.013060730855017244,(71:0.14523976087067103,75:1.3879194852363435):0.3333589885555921):0.051095275716666944,((21:0.23379576040460553,5:0.08450396831842438):1.2724933282830246,55:0.14982543268533566):1.5819959456094743):0.04238501767700997):0.42445024487807315):0.9335752130467556,(((((59:0.5583765773710039,82:1.0229814889739464):2.0262284935218986,((((56:0.12181364928154137,((60:0.6216053508855168,80:0.7729964858383543):0.020798946
837480514,50:0.8822032898467224):0.435092024954284):0.5210573955778481,68:0.22987542095442626):0.37082246837350796,(11:0.2537773523886817,((66:0.1135105187045009,47:1.4477336353709616):0.08562725773698254,((62:0.7970273485498136,(7:0.11598325830248846,1:0.5036319748241439):1.0526505463359794):0.36590489402030246,(52:0.13340072272101278,91:0.1370546630789482):0.46023881312192794):0.02916959442845113):0.38673537889656706):0.39395868912414267):0.7662491502961601,((37:1.110838344710146,(((38:0.9546481672843476,((69:0.19930238000643707,19:0.6005407545168806):1.1738316084346145,10:0.2551589871022202):0.06215257026410326):0.14250195516227704,(40:0.06886909144807296,((((((17:0.4318672095227283,(28:0.21978697257588475,86:0.03310590083384446):0.5183094602583803):0.023033822015270378,(31:0.17869110898513352,67:0.05718399747186875):0.5752856923994089):0.23278629636613868,(81:0.3724158200364376,79:0.7202517037692553):0.33944175123452003):0.13600438064238518,98:1.3005332208675515):0.08332934278262116,89:0.7225868497783603):0.1119773196570013,(96:1.2696299907945414,((48:0.1594873895676261,((51:0.9125092659214435,34:1.1038362280470857):0.1547607142716534,46:0.020852996045211736):0.029486834734979483):0.007497717851954633,((77:0.452520031779162,54:0.3031274472727311):0.261605600170702,24:0.04227110030653991):0.460488804375494):0.11580824023953551):0.3019265040090098):0.4053199474113922):0.536672565288562):0.011357139898990454,((83:0.9616030616846336,94:0.7024704034658114):0.17522950578763918,99:0.44779756653857117):0.017264833109371658):0.08434130999387612):0.025630671144057704,49:0.19121210472840522):0.45648465294874896):0.7919343107310728):0.10338771978532169,8:0.34674292192684764):0.08267662759021555,(76:0.30439306935274946,((44:0.46079144735569333,16:0.3321601586351881):0.21713822199622346,((((14:1.3256098632440168,100:0.25154989345941625):1.0412638747346932,41:0.5012105641168794):0.5387701436727275,78:0.12391763068654527):0.2886549771279019,90:0.3478513544360764):0.253506819570
1742):0.09103915532813911):0.33288272351850434):0.1107114076798883,18:0.33107611762760136):0.0925520120575829):0.11415229255684256):0.8351976614004988,(23:1.3172927167590816,(57:0.16887040433248224,((26:0.5563912527182566,84:1.1354131008126298):2.9402470919902255,39:1.298614026879397):0.13619115668976356):0.8768083947557272):0.1410510185120719);",
// "(((((14:0.06909967074591528,18:0.008348526065371509):0.6199385534414614,(((((86:1.3981866513993146,(22:0.4881370076469834,(1:0.4025199081725816,31:0.36911750360764556):0.1667172441271389):0.8618265297835835):0.42764098995838706,82:0.284276599882209):1.0262901396467705,(90:2.1896351150424467,(73:0.05785032663113321,((53:0.20728929479767944,49:0.10029765055438067):0.21544168455893864,27:0.05036465068452878):0.14548392795725418):0.8132561453660818):0.6733123617557313):0.15503141878426785,((((57:0.4338727446686934,33:0.12777429823861874):0.01848542631124328,(45:0.2587084986996797,((20:0.2969951121192853,85:0.36345684248360033):0.11821434720863178,15:0.9610866570072945):0.071474268687171):0.7796089161360698):0.06787867335300124,(60:0.8458907738040824,58:0.05684723683962445):0.5138731178983449):1.0504064182935218,(100:1.3449632993117353,((59:0.21126555094416744,91:0.692367982396022):0.19655888203227412,(74:0.33956540090437537,(67:0.21345619080552436,55:0.8998759367337437):0.5717035145499962):0.28597367669129947):0.3458544418358547):0.4788892574841346):0.10137981987700817):0.49883571366511426,(51:2.273775714547024,13:0.07272411702726611):1.2209874472349247):0.521076756165876):0.003694570964231758,((69:0.04206467181822804,(((((44:0.29905042820576977,80:0.3390417228377617):1.0144317679586936,83:0.050106714860378876):1.1812672337421168,52:1.2531971000364592):0.02358754733485391,70:0.29357585267344):0.27367876652559175,(16:1.5708494969306326,21:1.5455056524561397):0.7831599355851946):0.9764375206539095):0.018156706152054003,((97:1.5148456577494125,71:0.5246690125380193):0.07097065774823141,24:0.00796403982324101):0.020251856131282775):0.09815278519497106):1.7529274318141512,((46:0.4109712659865652,(93:0.570274206150633,((35:0.1961383428478949,17:0.9921071840272186):1.299181504487429,(((((((2:0.6217027083854507,11:0.1891019933086091):0.12856372761375512,94:0.34779730454202784):1.9077807293710576,29:0.17934268531349673):8.384961698224558E-4,((88:0.14024533879559176,((32:1.05
1013208159703,(((9:0.1058348957743559,41:0.5567579155764354):0.2631883617620079,28:0.6930870352289574):0.9920113293622722,(((4:0.49972450122803524,63:0.18557769376959699):1.6666481051108573E-4,54:0.7661490886443401):0.5409339612231581,42:0.2031473083262434):0.4825296181352956):0.31233225100781103):0.20564436461532498,76:0.07842107233367068):0.014030709994730106):0.21011313986110558,(((19:0.9979308951587191,(10:0.4256739992591998,62:0.6261078135311389):0.0779527165717655):0.1286844933126159,38:0.5942268614558595):0.3731957884853887,79:1.1101712959586396):0.9063905116202473):0.11750975067738212):0.13776239700878667,96:1.2462085602022133):0.12169701169988567,((50:0.19011562885314381,(25:0.11605092714413656,48:0.9037564933255857):0.15582813661458061):0.4920972897237488,(72:0.915622847439193,81:0.8035378655879422):0.03659526697898219):0.2518329086891824):1.2109815932384644,78:0.3592170406682089):0.006774675898328475):0.7605547591150037):0.10538157033163742):0.4057033426217096,((61:0.22777104834421102,((((((98:0.37619755379267783,(43:1.1482551151698017,(66:0.7523278709453546,23:0.6979851883565462):0.8140482163710253):0.6366417305228953):0.9501229297463536,((7:0.3789273599150307,(92:0.2045643510327264,((3:0.3838100072951548,64:0.07928612864369611):0.7354763043701136,77:0.2907372366124954):0.232659104032793):0.007437910192638064):0.6819571188651095,36:0.2964825578677073):1.094749969768622):0.5236485031240594,56:0.10123767910634518):0.5655443898160564,(((40:0.7329209921223252,((((75:0.8640066366663972,30:0.8497918872125507):0.14789516915738643,(47:0.1382885341897122,8:0.2502450578723663):0.1949062043386478):0.48052499220805256,65:0.3523270072322342):0.5731975322542326,(((39:1.2012755402628539,84:0.07172284256342576):0.09610494695209115,(87:0.2257137090959176,89:0.09331172687770861):0.388880126465341):0.06393683028345443,99:0.23184422695864448):0.7365986599714165):0.7341531748128616):0.35563295423312846,34:0.3183948196370685):0.21484112721304394,12:0.5263288233966033):0.28783
71746600479):0.5873181279956823,(68:0.8053867295135282,95:0.03773728634007956):0.01777980553155878):0.37222209704579257,(26:0.25226043555482036,5:0.0863376023858109):2.2940877664535617):0.04529443408887257):0.023232231434962003,6:0.1161858917291072):0.06709043731940145):0.36940580849439364):0.496188641961111,37:0.4496901978141734);",
// "((((66:1.8051857271854743,(27:0.6631995440727945,87:0.9493455897736638):0.14058697866630343):2.044339644575941,(91:0.24714044094438936,(52:0.010082026572115765,(46:0.3384107979262563,((((69:0.2563081951510289,(56:0.19209850379167603,(85:1.1487014739801298,44:0.028141966769021032):0.07278053255357575):0.8060945140472255):1.7031111101108491,((39:0.8121885192358218,((93:0.3212728375101488,((62:1.0714318480929954,60:1.191316504799483):0.2899545086962476,((6:0.05247359857018419,70:0.08016642111032357):1.6766912016292168,(((16:0.2710703447059435,33:0.8031429205917207):0.1995980164676372,(((18:0.3137521647733166,(51:0.17919542687575435,81:0.532183293809031):0.2211625695998567):0.246579225138021,75:0.3036524716677096):0.033318344448690196,1:1.1498681036277671):0.07494081500081617):0.9030096435285131,((78:0.2623254129121191,(((10:0.44920236229616384,43:0.7348634314748519):0.07873907664155344,96:0.34611492300694446):0.025153277777600014,2:0.15288135450516482):0.2297345940020432):0.524541385566262,97:0.2718940196532993):0.009099201418466762):0.35392756731770936):0.451259495223975):0.30074234384011334):0.30932973391370044,94:0.5441203148574121):0.08944899410833962):0.33725457603508646,(98:0.24160091351265178,28:0.5224635765351042):0.02100141154922408):0.01994495313557687):0.14922615986750776,(((((((36:0.8640770396416508,34:0.8658995356131345):0.6662363133067366,(((((3:0.004470690507340169,((86:0.11696262326765483,8:0.01946380269361858):0.6409740060528389,92:0.0571161024376301):0.46434119972766363):0.2617847150605239,(26:0.4659738621260723,(53:0.465643284428392,(7:0.6556440821438937,50:0.8220993308814497):0.7847585965832209):0.2315420749447039):0.025413585150737328):0.4495480716404432,(42:0.5066019935710511,(24:0.16441848539873627,63:0.3109270929376078):0.6560804110885441):0.014610292868232122):0.3575956742976465,(31:0.1824878019912024,((((22:1.4788525055241608,29:0.10692888087324404):0.019846907901590827,100:0.07709417898746329):0.39904728032626813,48:0.20800318210979185):0
.20974827021803932,40:0.16354565951487965):0.07338433934596988):0.16532098465160328):0.10854076817052816,(67:0.14973524550154016,(17:2.004017017661622,15:1.2896217656895907):0.08542696167592956):0.39569362961755905):0.18067539030754753):0.28141398648723825,(72:0.8608299131701789,(12:0.6125151396831114,(4:0.7444430438921765,(89:1.1991371200512142,(83:0.9377077160635627,64:0.6926152090937752):0.4659103333472263):0.010920711219980284):0.25725112025023833):0.5928759990406482):0.26538851971680355):0.18058095987412592,(((41:0.025317421500515414,(38:0.5177788914684798,74:0.3597803759840962):0.6467550787164872):0.6881377147089338,73:0.04319216737762743):0.3101215161205584,((54:0.1747510148532217,(99:0.051757307412777176,21:0.12022292279084779):0.040674674657433296):1.2419012411135124,(13:0.1889171299342245,(47:0.09367603200759,65:0.21405571371391408):0.01476240978756449):0.03584046369761684):0.5191383001939949):1.2315875122814122):0.043157274310656746,30:0.5900824386307497):0.3023574350702356,59:0.2564913261170587):0.3152391911170924,(((90:0.42740753167627976,((32:0.15213136211575362,20:0.8796148450215902):0.005020649011782563,61:0.5189674781385305):0.08194851288547556):0.366599449923938,49:1.5314569288514526):0.6720365004224296,((((88:0.5222590371125548,82:0.134764595276595):0.01664471612099061,19:1.3790498661692407):0.19192387207098038,(68:0.045178879969167696,(((84:0.09408886605284161,45:0.20025711686633096):0.04932228749019729,(9:0.22332416564681823,25:0.3869182826178045):0.28086443384539994):0.5084915763967757,58:0.08751403211001607):0.31424869150308843):0.03523442710528646):0.8560567696526316,11:0.8759233768998129):0.4475166845973648):0.7582328044867799):0.045436345892345464):0.27220466278639677,(14:0.08724136422846618,((5:0.9911226622901959,76:0.7920881888799691):0.3244238195974183,37:0.571334180212788):0.19443954198784086):0.572737330471933):0.21025852084237417):0.0138933027769097):1.3000766438965536):0.9822840947855207):1.121349654877636,(35:0.2639216882487281,(57:
0.12944428453131707,55:0.37884342879439004):2.528599883295273):0.8653933012996609):0.44010115154763696,(77:0.3110662001971898,(((95:0.6718413317050178,80:0.1938275652432324):1.676089846004932,23:0.40600756750884504):0.39793949171176024,(71:0.2766950876509142,79:0.021716057566452562):0.755124877666371):0.0559359693561543):0.5367300018452985);",
// "(((84:0.5218034996774268,(34:0.7248627225527633,58:1.106941990535364):0.7769048713276465):0.15318323458264604,(((78:0.05152105841888144,47:0.1494774579665945):0.22485003835899597,(((74:0.6780468472435803,49:0.08067535008314763):0.2969275939747762,(((98:0.872811312943548,(((((7:0.5189876728272584,(22:0.8091373750894277,(56:0.012720215475482965,95:0.25553876262503517):0.45561093090810223):0.37156673899045645):0.5356573388243997,27:0.5002436081418524):0.4382836447406966,(((66:0.009238168486191745,(83:1.097087549661701,59:0.035452835708333685):0.18557623088284902):0.06536010382226087,((87:0.9671241326044003,((16:0.5761335171481893,(50:0.08274841507119379,29:0.20452154014734525):0.008960605574878988):0.15466580443456246,6:0.031038985470871472):0.08873693148735529):0.4895952840353077,8:0.021237372146760913):0.029158925355997622):0.6265124178594199,20:0.0562256135785657):0.14155058845923874):0.34246719151547333,52:0.15238472576944284):0.4477326621284359,(81:0.21286177861903255,(38:0.42739746705523585,((32:0.2563939476177466,(77:0.09839158118983615,((46:0.3063079776955284,44:1.0764788865361645):0.0967881563934081,(64:0.015365891691861533,(37:0.3118178978180244,(18:1.0481894711877224,28:0.5696833775511019):0.10531476820290253):0.6750406605860493):0.2389474115301009):0.4460808727445591):0.16338377374941127):0.24658159668397372,(60:0.1941521696748576,(57:0.4396173353971071,45:0.14764439636767435):0.00292563495445064):0.5835606891253495):0.03559882517394586):0.007593162074185589):0.026737592907228436):0.5043586033493836):0.08892358714303672,(94:0.32167568499503174,13:0.24981928101855377):0.3005243433313902):1.9033152658984709,10:0.04014718449395094):0.7733848279809488):0.4247027874163072,26:0.028719487023320944):0.14531692560198994):0.46251150562929144,70:0.8772022746957981):0.20189500470051325):0.6201916487517867,(99:0.3985631513944936,(91:0.1098357173489104,(89:0.438021223588029,((((100:1.2155190484767608,((67:0.9245086601024375,61:0.9062831075622751):0.8799529224721387,(
((19:0.6263320501489607,73:0.32725924441884124):0.545566863874281,((41:0.14479649843775055,76:0.35888793706597066):0.4751385103081651,(4:0.43409303833894364,97:0.03209835708235764):0.06955418065813235):0.20042318205952347):0.029130580497363834,75:0.4079664634631449):0.8342155556700948):0.22195875614900462):2.8313129468083824,((93:0.0032588777353970144,71:0.3314745510222519):0.18972134813675545,2:0.9790094658343982):0.2531977560327814):0.3212678043966397,((30:0.42125777935013353,(((92:0.23264294534966057,(((53:0.046927847524238536,1:0.12892356304959043):0.9182746307439067,((88:0.45401944305455244,12:0.07726464080803563):0.03531296007223317,24:0.3408916676707102):0.07312560201814489):0.8064335697234057,25:0.06574117610264962):0.3946280484435778):1.005385484039453,((80:0.10085302357579451,69:0.5381814163465224):0.18676540711800804,65:1.0653333275501993):0.22994806447027383):1.2907938991943295,(85:0.2786810201584169,43:0.3631279015020863):0.5591846264485989):0.370092595392598):0.01894215495227325,((((((82:0.07699009030211812,(11:0.019967389878505104,72:1.1986378804373903):0.7206619673334742):0.030554149050260992,96:0.06375870671885409):0.35439458784869826,(((86:0.5215929147365141,17:0.020839534194150744):0.22913355675960778,15:0.4899882008930847):0.038287250687130125,5:1.1227346249945698):0.04397277157006885):1.0508341848072957,48:0.2514002382673981):0.02272355663925163,(((((39:0.9008867900722497,90:0.5652518469346437):0.22912950939533494,((42:0.5288472434347464,((36:0.44411185594081104,55:0.6900228670372428):0.5030156268758961,23:0.9444877196027027):0.3582066070765175):0.06253855573049583,68:0.046825650993242096):0.056714246164616844):0.11292453687408499,(((62:0.1136023999488942,63:0.5282183994014511):0.08584662186109537,40:0.15052685144149636):0.09561535958851475,((35:1.0688079540107962,3:0.9323032634016541):0.02676955012696025,(51:0.9729500524306913,14:0.04486630061190611):0.1850717537389439):0.3980136894774997):0.4815062511338304):1.5066517480878021,31:0.24181583776
313254):0.17640506748799334,(9:0.5292782083049792,21:0.06130431208511533):0.44107173512354025):0.11687497159870386):0.3801421398812672,33:0.0730333043861151):0.6999711958216572):0.5822070189933886):0.6830709433466415,(79:0.7470133542750927,54:1.495896451713568):0.0044491490776241704):0.2935533886021364):0.7140255334236354):0.9678607659741312):0.0675470728377956);",
// "(((((96:0.19593269703329863,(100:0.7544992453177741,24:0.032608961543205695):0.16801348050329512):0.217143210622309,(((80:0.001517617325462517,(((60:0.02600794849135557,((97:0.08862478248090877,(19:0.22693797531889626,14:1.1295478496913884):0.7510520098391376):0.5247595039550568,(((17:0.08377600437579646,((45:0.10555732175758381,((40:0.4671746347506913,58:1.1710806360458348):0.237996954915197,15:1.6658253812263526):0.9946316920992826):0.4687370662276278,(((((34:1.7724867001334168,49:1.4163660136774823):0.09799993689364817,((10:0.3009743358225263,(35:0.04773170379064973,(5:0.17885173014166966,16:0.7106681518198553):0.37256723224254795):0.053666752632955284):0.04099274904851291,(69:1.4439333283616942,((6:0.417897809280078,(71:0.02952484466050409,(54:0.10021888774073617,(59:0.43534404270662674,68:0.5665743072448134):0.06808936191880166):0.2418981434934132):0.14471019000705265):0.1285601271762129,((67:0.3519008103609327,36:0.3524636433882793):0.3665967423770632,23:0.20115734537071395):0.31480634945479935):0.209948045622246):0.5080298795131808):0.15986622454723554):0.6632809979575685,78:0.12343860143021823):0.2738378845452156,((27:0.0430531585652596,21:0.5598360365943802):0.35362454473229255,50:0.05979211997313172):0.24690107828472208):0.015925882324924423,(25:1.1164298102831889,29:0.93183326688396):0.618319524515369):0.06703753924466804):0.13366252535421363):0.5339325939095994,((((91:0.2772414826977889,52:0.5054410298692358):0.012245961880808842,(57:0.6908957477779047,46:0.1242660536287643):0.45678045518403665):0.09689181618779052,((43:0.05875674734537528,90:0.2470157776549904):0.6470927718147148,((((28:0.7062726345696688,41:0.12619466507616672):0.5279339445274762,((1:0.16197752379023367,12:0.0936878341766092):1.0118107676816173,4:0.06371547275534928):0.06331178642963398):0.1449167827806901,63:0.1231900161960271):0.23828417706345473,20:0.0378601589214822):0.045611678615212314):0.8587293083448599):0.5420674642468035,(64:0.12296240685548643,((26:1.3055554366821998,((2
2:0.7135388026823565,53:0.6553794907233433):0.5635353706486713,3:0.319169046517507):0.47913944032663514):0.6382409074552566,9:0.7129345589733305):0.28744783392818807):0.17306963200514502):0.7405187895273713):0.33399387891050125,8:1.8023038546554835):0.5924678803112702):0.5445164601831172):0.007238022980884828,32:1.3128883830865705):0.13340690953951384,(((98:0.523994867853685,((((81:0.13808210286308853,70:0.24449839687404396):0.09407430160994723,(39:0.02534540096873017,((89:0.20045498756714775,(83:0.08503372751564287,73:0.050892711823579906):0.14527900345950628):0.1007758060653059,87:0.6794603298260105):0.1486142740763876):0.9339744334947284):0.33286974301166516,(94:0.5872025248543964,(((99:1.130031886234744,65:0.0334368758185879):0.05493559297608952,33:1.4714983028992503):0.05406628655084811,11:0.4915430641248135):0.367449567894214):0.3933625310757476):0.7105188300285632,(66:0.07264775680667279,56:0.37051904950175896):0.08188191033372494):1.5059372514762073):0.2558116145927736,37:0.006350741961370865):0.09672071241581914,(77:0.19759715629071817,(79:0.12149768625576307,31:1.03643365865962):0.39015421362521874):0.3143643683653279):0.42068578176042415):0.09655626349881441):0.02460400823353126,86:0.36084437426554317):0.25306700347709654,72:0.82290814071923):0.6134198703925042):0.22465805069410472,82:0.09158964290059579):0.22023484997064813,(((30:0.017154279422356034,((((75:0.49143357922184805,(76:0.28537001523743943,(47:0.13980148118310265,88:0.27558089705798616):0.6356770970597386):0.7139308280815952):0.15999410869172204,((62:1.120571203782646,38:0.44549750204481153):0.15854167840146105,84:0.977150519578527):0.6993202553505358):0.5667700294334432,48:0.30455344416372787):2.3977573269791823,((((74:0.1257844219105282,(((13:0.26846285962892447,93:0.34994306207577575):0.0668117380329103,7:1.6134247531587778):0.06431121181186761,85:1.0596553228311065):0.36041129787105053):0.08853526300450731,18:0.7823801049245711):0.11583807527929313,42:0.1460761985367034):0.3036681077746568
,2:0.6345246941343614):0.14627030205926506):0.0666910115268129):0.006447006748649287,95:0.33013306497314243):0.13748684668120426,44:0.9453930357994347):1.498888465149748):0.19473209209458542,(92:0.797016413269569,((51:0.7299446646702386,55:0.8426349822491348):0.2552910980781258,61:0.316238194437501):0.3443810869076849):1.3395752515186246);",
// "((72:0.26778410529745145,16:0.9099901060236721):0.572556639307777,((((95:0.16476383779159676,88:0.043010502026175956):0.027373721572081067,(49:0.5337319419158533,((65:0.7437555987541389,(((25:0.06520160789642127,2:0.09276120923381193):0.49176437927810923,((((66:0.1640592271541621,(30:0.6310067611534267,63:0.053353057470609144):0.26775377484213436):0.6911153103988585,23:0.25886630953279965):0.23672061440817593,85:0.6811271712242579):0.88047109253489,21:0.035509833182325146):0.1334075747734209):0.045841843378031655,(91:0.7810796396220372,((((35:0.3696497234316247,((99:0.2841562566009217,((((90:0.2091892549323302,93:0.39353395516714484):0.4232206217777461,9:0.11251447994286279):0.1800308940966806,((18:0.15316383890120688,69:0.14033738659421358):0.1844231030866611,(81:0.43578009647324467,73:0.3103644319971215):0.6485283802451252):1.069977767390587):0.7129782655656447,64:0.18449503942191292):0.4062398841393562):0.29321111632547225,48:0.0575819644497404):1.2387494936054075):0.3393774536470353,55:0.11323963232805934):0.07387562694960081,10:0.036610352098754184):0.03395775996906458,((((52:0.38144323157783244,80:0.7987611381333588):0.509346114812463,((((8:0.22586585677875637,53:0.3373759652024715):0.46455286573529553,((59:0.18862934983024987,1:0.4528645015033979):0.911575221871987,(56:0.5374669476738119,(17:0.3727190362752084,28:0.12812985951756062):0.17146441607895202):0.07484172326864602):0.526560218829603):0.18246989560362858,(44:1.763903077040106,(7:0.19339905954378656,20:0.3260783550961785):1.5124538821696814):0.08241196559786013):0.14021012116354425,58:0.5590182652099926):1.4398893973627445):0.34905785033321957,((74:0.09039135729219083,78:0.20995854952773785):0.5205312190594968,(24:0.3668850104763135,4:0.5145175498867807):0.7434984872360539):1.613008904745389):0.6102349600725585,(62:0.6044803001138535,((((((96:0.5072937801557419,31:0.5794214212731033):0.2712081265480051,68:0.8506805096120594):0.5974905834493063,(57:0.10700636565501898,51:0.5384611852745366):1.39509
3109724256):0.5019533182615326,(((((84:0.317204073514155,(((34:0.6175993994922453,32:0.7896059037064286):0.17947059708583135,(12:0.09115432415292446,60:0.47504664684058123):0.32326923070610825):0.42274952341622196,22:0.5197693871547759):0.08704082139413138):0.0088231898539044,(76:0.3938693591201038,94:0.37462781851914506):0.7773782014408085):0.020621354256771696,(61:1.3016030558283005,47:1.5796513156674639):0.04251393298459716):0.24836295995452384,89:0.2939297831039678):0.18415456033238842,(83:0.11559894501584678,82:0.5752882967091613):0.035998368754705634):0.36381139913404903):0.5307313823214246,36:0.43493768953607015):0.5686164880390647,(((((67:0.5287168805848876,(29:0.5197748572102281,(19:1.0258515788924416,41:0.10253767736666797):0.28420174331847314):0.04755892859287769):0.3028227898993232,13:0.9761816696651464):0.14466768486781945,43:0.08487172864726):0.12262855875809464,50:0.28590290495471304):0.1514980456284012,((92:0.04957813532763389,(37:0.008643063730506828,39:0.06388175496913862):0.216977791613008):0.07449478180585478,98:1.2060833459990103):0.6268705601455311):0.5501754535605508):0.014143451145258279):1.0077776094334494):0.6480814593866651):1.8610917060340562):0.23694373939904612):0.012096770663862166):1.1499279830077036,15:0.0989446231713007):0.09782579398140889):0.31599860360596743):0.8609861644022807,71:0.044818540117461225):0.05016986176329574,(((((((27:0.9192667546083975,(26:0.0900591654577454,46:0.19904964324240204):0.17486983722355198):0.03233681665933297,38:0.20545605518965893):0.3487562172361347,33:1.829218161398523):0.04487328698402493,(97:0.30044578766684804,87:0.050700523096797845):0.06054292682925411):0.2851509325367987,((((11:0.20798266945753596,(77:0.45443260432308286,5:0.6776501421086194):1.2052960699230568):0.7021089990669229,(100:0.036641296543967306,79:0.48749059631552927):2.0951639964127784):0.874795550652919,3:0.6104497577068013):0.5962534685036696,54:0.4761069361614636):0.5090942166401735):0.7364219385434367,6:0.2317442838428354):3.5
88516992657726,(((14:0.33085839693868735,42:0.1359545497645671):0.16976090503858643,(((75:0.3232483960272008,86:0.03351890552827985):0.12620180003638115,40:0.6959311697498141):0.4808677568002908,70:0.03828468578590982):0.29147282423511633):0.02294399302483452,45:1.9835733808193368E-4):1.400094368080568):0.568259308159158):0.19020027103967863);",
// "(((74:0.32981227091371323,(61:0.04503308082760071,((((97:0.4415529009725141,(91:0.9798103608717241,27:0.007745587863913883):0.07942990106969461):1.2769159284618148,28:0.018357012680464635):0.07421385884018328,67:0.010988614673328456):0.20269918955757404,((54:0.33424803276537496,(53:0.6833319504672986,(((((30:0.7149204751491034,19:0.1328998609477705):0.5286161357202213,96:1.192845854661313):1.5426411326305987,((26:0.46558316702624136,(71:1.2838507070550418,(4:1.8471165386594222,(((85:0.23372337345568406,(81:0.13666514053050038,5:0.6153505091584774):0.05903098010793073):0.855841826365686,83:0.31722231652923405):0.005665445735901997,(((((33:0.9875367622781843,99:0.5035332737307725):0.025989565009862936,84:0.8338312588209604):1.29743494765373,2:0.22374875572575625):0.9465989713141041,((((88:0.27236320760881805,11:0.11896248698604683):0.39506834236145805,(94:0.6073728052553622,42:0.31804837489583715):0.6207294375416619):0.04637568555454141,92:0.30800096620746364):0.08408374712585642,14:0.055289537060422056):0.4662809740077378):1.0548882541426607,46:0.3549566157230615):0.2989958819454621):0.10442819181779406):0.11673214955737166):0.27408086791743536):0.2875395174855493,(29:0.25219919926970213,(24:0.31075253085249077,25:0.6357825103706789):0.9347007949517891):0.22222181551988918):0.04092256999724153):1.2584507979295116,(70:0.019434479033757945,((44:0.0062545030920668765,((95:1.047397366664728,(52:0.34006529784617356,((((((47:1.0400425061198164,34:0.12282787949619411):0.3003582696154856,65:1.255615266857924):0.28061019974208445,(15:0.40991306473145883,41:2.5325759764251714):0.29883074949672706):0.08869241200262046,(49:0.35348462562931315,((48:0.5952630579869904,(72:0.4514206570177155,79:1.4283242213442708):0.19871948608119716):0.27548144452960877,12:0.892984722803791):0.44455045551456385):0.296860413392102):0.1641705162239746,((8:0.46686476673516797,((57:0.24179054456661264,10:0.010065770371418425):1.3076352901359078,(62:0.3049877998249979,((((((39:0.6593336279735633,80
:0.16215091538111204):0.13671373823858168,((1:0.5374889016511959,98:0.3706747368135581):0.31640704372638795,64:0.5378967882260994):0.404209319450642):0.18200789367090042,90:0.9945839246994226):0.18799319544925197,45:0.35993176598759224):0.4805001706579122,(87:0.2299316509284952,20:1.8154332925124594):0.06488606618201231):0.19510829753371395,86:0.06894472708323773):0.3373684025347101):0.0056959492998269745):0.25231491181126486):0.12181032162614702,82:0.32401681418757766):0.44585600292315597):0.8964551643568837,(9:0.8182299720384876,(((16:0.7530769694312919,58:0.25659887324514496):0.8858547153564256,((13:0.7720021319127448,21:0.1809826867886699):0.16602736502837678,32:0.4048244114238031):0.7617183505627576):0.14137643667124378,(((((75:0.6503368892661168,36:0.6681993990037137):0.07734336586798518,(23:0.947923270388612,38:0.9574489826546935):0.06515522359532677):0.36911814314844116,77:0.2856180040191263):0.9187372332781449,((18:0.34868630512202475,50:0.26007250244097385):1.1632665967474327,3:1.4886189059882884):0.7676441391769049):0.016672870838133402,66:0.1689604150664259):0.14061842083130127):1.794793960408692):0.01642820701990111):0.8368305846625717):0.16123780567983914):0.035022606357221875,((89:2.3368911898605447,((76:0.649760192186692,63:0.01916583588484677):0.18707781548642233,(78:0.12717634239246678,31:0.043787753691652664):0.20409136472281997):0.17057872169254384):1.0136507337418088,(55:0.19514947568087493,17:0.13009194136081348):0.41479966211421093):0.24402253889293046):0.133397038420215):0.11414081547087918,(59:1.7109392994601142,((7:2.82622056805058,((68:0.11521280961551272,(37:0.3961142869085892,((22:0.09245304595081436,(93:0.10854203540491394,51:0.157205892369487):0.12374149365656104):0.16768907611151262,40:0.450066379049208):0.9217230804189102):0.05866460961760889):1.4357542152895868,69:0.12691297946788405):0.1762020854379247):0.7135474208651922,35:0.22156120885860586):0.3109217127373065):1.5386593468067389):0.9799666335924311):0.07876809613355107):0.6217
290638352955,43:0.008058622432427498):2.1261353999875325):0.05438125770080404):0.398909900962483,100:0.31809473558773327):1.1788777106096084):0.24757224447711934):0.3981175105591799):0.7398874872692467,(6:0.03241667394145864,60:0.1638714537340764):0.628752104994776):0.1658371918015913,(73:0.1322536908343963,56:0.29379799240773075):0.9210859556235569);"
}
// double sum = 0;
// for (int n = 2; n <= 100; n++) {
// sum += 1/(double) n;
// System.out.println("sum = " + sum);
|
package dr.inference.model;
import dr.inference.parallel.MPIServices;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import java.util.ArrayList;
import java.util.Arrays;
/**
* Represents a multi-dimensional continuous parameter.
*
* @author Alexei Drummond
* @version $Id: Parameter.java,v 1.22 2005/06/08 11:23:25 alexei Exp $
*/
public interface Parameter extends Statistic, Variable<Double> {
/**
 * Returns the value of a single dimension of this parameter.
 *
 * @param dim the index of the parameter dimension of interest
 * @return the parameter's scalar value in the given dimension
 */
double getParameterValue(int dim);
/**
 * @return a copy of the parameter's values (the returned array may be
 *         modified freely without affecting this parameter)
 */
double[] getParameterValues();
/**
 * Sets the scalar value in the given dimension of this parameter
 * (unlike {@link #setParameterValueQuietly}, this fires change events).
 *
 * @param dim   the index of the dimension to set
 * @param value the value to set
 */
void setParameterValue(int dim, double value);
/**
 * Sets the scalar value in the given dimension of this parameter
 * without firing any change events.
 *
 * @param dim   the index of the dimension to set
 * @param value the value to set
 */
void setParameterValueQuietly(int dim, double value);
/**
 * Sets the scalar value in the given dimension of this parameter,
 * and notifies listeners that the values in ALL dimensions have changed.
 *
 * @param dim   the index of the dimension to set
 * @param value the value to set
 */
void setParameterValueNotifyChangedAll(int dim, double value);
/**
* @return the name of this parameter
*/
String getParameterName();
/**
* adds a parameter listener that is notified when this parameter changes.
*
* @param listener the listener
*/
void addParameterListener(VariableListener listener);
/**
* removes a parameter listener.
*
* @param listener the listener
*/
void removeParameterListener(VariableListener listener);
/**
* stores the state of this parameter for subsquent restore
*/
void storeParameterValues();
/**
* restores the stored state of this parameter
*/
void restoreParameterValues();
/**
* accepts the stored state of this parameter
*/
void acceptParameterValues();
/**
* adopt the state of the source parameter
*
* @param source the parameter to adopt values from
*/
void adoptParameterValues(Parameter source);
/**
* @return true if values in all dimensions are within their bounds
*/
boolean isWithinBounds();
/**
* Can be called before store is called. If it results in new
* dimensions, then the value of the first dimension is copied into the new dimensions.
*
* @param dim new dimension
*/
void setDimension(int dim);
/**
* Adds new bounds to this parameter
*
* @param bounds to add
*/
void addBounds(Bounds<Double> bounds);
/**
* @return the intersection of all bounds added to this parameter
*/
Bounds<Double> getBounds();
/**
* Adds an extra dimension at the given index
*
* @param index Index of the dimension to add
* @param value value to save at end of new array
*/
public void addDimension(int index, double value);
/**
* Removes the specified dimension from parameter
*
* @param index Index of dimension to lose
* @return the value of the dimension removed
*/
public double removeDimension(int index);
/**
* Abstract base class for parameters
*/
/**
 * Abstract base class for parameters. Supplies listener management, the
 * store/restore/accept bookkeeping, the Statistic and Variable adapters,
 * and MPI state transfer helpers. Concrete subclasses implement the
 * storeValues/restoreValues/acceptValues/adoptValues hooks.
 */
public abstract class Abstract extends Statistic.Abstract implements Parameter {

    // MPI IMPLEMENTATION

    /**
     * Sends all values of this parameter to the given MPI rank.
     *
     * @param toRank destination rank
     */
    public void sendState(int toRank) {
        double[] value = getParameterValues();
        MPIServices.sendDoubleArray(value, toRank);
    }

    /**
     * Receives values for this parameter from the given MPI rank, setting
     * each dimension quietly and firing a single change event at the end.
     *
     * @param fromRank source rank
     */
    public void receiveState(int fromRank) {
        final int length = getDimension();
        double[] values = MPIServices.receiveDoubleArray(fromRank, length);
        for (int i = 0; i < length; i++)
            setParameterValueQuietly(i, values[i]);
        this.fireParameterChangedEvent();
    }

    /**
     * @return the number of dimensions; defaults to a single dimension,
     *         so multi-dimensional subclasses must override.
     */
    public int getDimension() {
        return 1;
    }

    /**
     * Fired when all dimensions of the parameter have changed (index -1).
     */
    public void fireParameterChangedEvent() {
        fireParameterChangedEvent(-1, Parameter.ChangeType.VALUE_CHANGED);
    }

    /**
     * Fired when a single dimension of the parameter has changed.
     *
     * @param index which dimension changed (-1 means all)
     * @param type  the type of parameter change event
     */
    public void fireParameterChangedEvent(int index, Parameter.ChangeType type) {
        if (listeners != null) {
            for (VariableListener listener : listeners) {
                listener.variableChangedEvent(this, index, type);
            }
        }
    }

    public final void addParameterListener(VariableListener listener) {
        // Lazily created: most parameters never acquire listeners.
        if (listeners == null) {
            listeners = new ArrayList<VariableListener>();
        }
        listeners.add(listener);
    }

    public final void removeParameterListener(VariableListener listener) {
        if (listeners != null) {
            listeners.remove(listener);
        }
    }

    // IMPLEMENT STATISTIC

    public final String getStatisticName() {
        return getParameterName();
    }

    public final double getStatisticValue(int dim) {
        return getParameterValue(dim);
    }

    /**
     * Not supported here; resizable subclasses override (see Default).
     */
    public void setDimension(int dim) {
        throw new UnsupportedOperationException();
    }

    /**
     * Defensively returns a copy of the parameter array.
     *
     * @return a copy of the parameter values
     */
    public double[] getParameterValues() {
        double[] copyOfValues = new double[getDimension()];
        for (int i = 0; i < copyOfValues.length; i++) {
            copyOfValues[i] = getParameterValue(i);
        }
        return copyOfValues;
    }

    public final void storeParameterValues() {
        // isValid guards against storing twice before a restore/accept.
        if (isValid) {
            storeValues();
            isValid = false;
        }
    }

    public final void restoreParameterValues() {
        if (!isValid) {
            restoreValues();
            isValid = true;
        }
    }

    public final void acceptParameterValues() {
        if (!isValid) {
            acceptValues();
            isValid = true;
        }
    }

    public final void adoptParameterValues(Parameter source) {
        adoptValues(source);
        isValid = true;
    }

    public boolean isWithinBounds() {
        Bounds<Double> bounds = getBounds();
        for (int i = 0; i < getDimension(); i++) {
            final double value = getParameterValue(i);
            if (value < bounds.getLowerLimit(i) || value > bounds.getUpperLimit(i)) {
                return false;
            }
        }
        return true;
    }

    // IMPLEMENT VARIABLE

    /**
     * @return the name of this variable.
     */
    public final String getVariableName() {
        return getParameterName();
    }

    public final Double getValue(int index) {
        return getParameterValue(index);
    }

    public final void setValue(int index, Double value) {
        setParameterValue(index, value);
    }

    public Double[] getValues() {
        Double[] copyOfValues = new Double[getDimension()];
        for (int i = 0; i < getDimension(); i++) {
            copyOfValues[i] = getValue(i);
        }
        return copyOfValues;
    }

    /**
     * @return the size of this variable - i.e. the length of the vector
     */
    public final int getSize() {
        return getDimension();
    }

    /**
     * Adds a parameter listener that is notified when this parameter changes.
     *
     * @param listener the listener
     */
    public final void addVariableListener(VariableListener listener) {
        addParameterListener(listener);
    }

    /**
     * Removes a parameter listener.
     *
     * @param listener the listener
     */
    public final void removeVariableListener(VariableListener listener) {
        removeParameterListener(listener);
    }

    /**
     * Stores the state of this parameter for subsequent restore.
     */
    public void storeVariableValues() {
        storeParameterValues();
    }

    /**
     * Restores the stored state of this parameter.
     */
    public void restoreVariableValues() {
        restoreParameterValues();
    }

    /**
     * Accepts the stored state of this parameter.
     */
    public void acceptVariableValues() {
        acceptParameterValues();
    }

    // Hooks implemented by concrete subclasses.
    protected abstract void storeValues();

    protected abstract void restoreValues();

    protected abstract void acceptValues();

    protected abstract void adoptValues(Parameter source);

    public String toString() {
        StringBuilder buffer = new StringBuilder(String.valueOf(getParameterValue(0)));
        Bounds<Double> bounds = null;
        try {
            bounds = getBounds();
        } catch (NullPointerException ignored) {
            // getBounds() throws NPE when no bounds have been set (see
            // Default.getBounds()); in that case bounds are simply omitted.
        }
        final String id = getId();
        if (id != null) buffer.append(id);
        if (bounds != null) {
            buffer.append("=[").append(String.valueOf(bounds.getLowerLimit(0)));
            buffer.append(",").append(String.valueOf(bounds.getUpperLimit(0))).append("]");
        }

        for (int i = 1; i < getDimension(); i++) {
            buffer.append(", ").append(String.valueOf(getParameterValue(i)));
            if (bounds != null) {
                buffer.append("[").append(String.valueOf(bounds.getLowerLimit(i)));
                buffer.append(",").append(String.valueOf(bounds.getUpperLimit(i))).append("]");
            }
        }
        return buffer.toString();
    }

    public Element createElement(Document document) {
        throw new IllegalArgumentException();
    }

    // True when no store is pending (stored state equals current state).
    private boolean isValid = true;

    private ArrayList<VariableListener> listeners;
}
/**
* A class that implements the Parameter interface.
*/
/**
 * A basic array-backed implementation of the Parameter interface.
 */
class Default extends Abstract {

    public Default(int dimension) {
        this(dimension, 1.0);
    }

    public Default(double initialValue) {
        values = new double[1];
        values[0] = initialValue;
        this.bounds = null;
    }

    /**
     * @param id           a unique id for this parameter
     * @param initialValue the initial value for this parameter
     * @param lower        the lower bound on this parameter
     * @param upper        the upper bound on this parameter
     */
    public Default(String id, double initialValue, double lower, double upper) {
        this(initialValue);
        setId(id);
        addBounds(new DefaultBounds(upper, lower, 1));
    }

    public Default(int dimension, double initialValue) {
        values = new double[dimension];
        for (int i = 0; i < dimension; i++) {
            values[i] = initialValue;
        }
        this.bounds = null;
    }

    public Default(double[] values) {
        // Defensive copy: the caller's array is not retained.
        this.values = new double[values.length];
        System.arraycopy(values, 0, this.values, 0, values.length);
    }

    public Default(String id, int dimension, double initialValue) {
        this(dimension, initialValue);
        setId(id);
    }

    /**
     * BUG FIX: this override was missing, so Default inherited
     * Abstract.getDimension() which always returns 1. That misreported the
     * dimension of multi-dimensional parameters to setDimension(),
     * isWithinBounds(), getValues(), adoptValues() and addBounds(double,double).
     *
     * @return the number of dimensions, i.e. the length of the value array
     */
    public int getDimension() {
        return values.length;
    }

    public void addBounds(Bounds<Double> boundary) {
        if (bounds == null) {
            bounds = new IntersectionBounds(getDimension());
        }
        bounds.addBounds(boundary);

        // can't change dimension after bounds are added!
    }

    /**
     * Defensively returns copy of parameter array.
     *
     * @return a copy of the parameter values
     */
    public final double[] getParameterValues() {
        double[] copyOfValues = new double[values.length];
        System.arraycopy(values, 0, copyOfValues, 0, copyOfValues.length);
        return copyOfValues;
    }

    /**
     * Do not write to the returned array directly!!
     *
     * @return the live internal value array (not a copy)
     */
    public final double[] inspectParameterValues() {
        return values;
    }

    public Bounds<Double> getBounds() {
        if (bounds == null) {
            throw new NullPointerException(getParameterName() + " parameter: Bounds not set");
        }
        return bounds;
    }

    public String getParameterName() {
        return getId();
    }

    /**
     * Can only be called before store is called. If it results in new
     * dimensions, then the value of the first dimension is copied into the new dimensions.
     *
     * @param dim the new number of dimensions
     */
    public void setDimension(int dim) {
        final int oldDim = getDimension();
        if (oldDim == dim) {
            return;
        }

        assert storedValues == null :
                "Can't change dimension after store has been called! storedValues=" +
                        Arrays.toString(storedValues) + " bounds=" + bounds;

        double[] newValues = new double[dim];
        // copy over existing values (BUG FIX: clamp the copy length so that
        // shrinking no longer throws ArrayIndexOutOfBoundsException)
        System.arraycopy(values, 0, newValues, 0, Math.min(oldDim, dim));
        // fill any new dimensions with the first item
        for (int i = oldDim; i < dim; i++) {
            newValues[i] = values[0];
        }
        values = newValues;

        if (bounds != null) {
            assert oldDim < dim : "Can't decrease dimension when bounds are set";
            for (int k = 1; k < oldDim; ++k) {
                assert ((double) bounds.getLowerLimit(k) == bounds.getLowerLimit(0)) &&
                        ((double) bounds.getUpperLimit(k) == bounds.getUpperLimit(0)) :
                        "Can't change dimension when bounds are not all equal";
            }
            final double low = bounds.getLowerLimit(0);
            final double high = bounds.getUpperLimit(0);
            // rebuild the bounds with the new dimension
            bounds = null;
            addBounds(low, high);
        }
    }

    /**
     * Inserts an extra dimension at the given index.
     *
     * @param index index at which the new dimension is inserted
     * @param value value stored in the new dimension
     */
    public void addDimension(int index, double value) {
        assert bounds == null;
        final int n = values.length;
        double[] newValues = new double[n + 1];
        System.arraycopy(values, 0, newValues, 0, index);
        newValues[index] = value;
        System.arraycopy(values, index, newValues, index + 1, n - index);
        values = newValues;
        fireParameterChangedEvent(index, Parameter.ChangeType.ADDED);
    }

    /**
     * Removes a single dimension from the value array.
     *
     * @param index index of the dimension to remove
     * @return the removed value
     */
    public double removeDimension(int index) {
        assert bounds == null;
        final int n = values.length;
        final double value = values[index];

        final double[] newValues = new double[n - 1];
        System.arraycopy(values, 0, newValues, 0, index);
        // BUG FIX: previously copied (values, index, newValues, index - 1,
        // n - index), which kept the removed value, overwrote values[index - 1]
        // and threw for index == 0. Copy the tail that follows the removed slot.
        System.arraycopy(values, index + 1, newValues, index, n - index - 1);
        values = newValues;

        fireParameterChangedEvent(index, Parameter.ChangeType.REMOVED);
        return value;
    }

    public void setParameterValue(int i, double val) {
        values[i] = val;
        fireParameterChangedEvent(i, Parameter.ChangeType.VALUE_CHANGED);
    }

    /**
     * Sets the value of the parameter without firing a changed event.
     *
     * @param dim   the index of the parameter dimension
     * @param value the value to set
     */
    public void setParameterValueQuietly(int dim, double value) {
        values[dim] = value;
    }

    /**
     * Sets one value of the parameter and notifies listeners that ALL values
     * of the parameter have changed.
     *
     * @param i   index of the value
     * @param val the value to set
     */
    public void setParameterValueNotifyChangedAll(int i, double val) {
        values[i] = val;
        fireParameterChangedEvent(i, Parameter.ChangeType.ALL_VALUES_CHANGED);
    }

    protected final void storeValues() {
        // storage array is allocated lazily on first store
        if (storedValues == null) {
            storedValues = new double[values.length];
        }
        System.arraycopy(values, 0, storedValues, 0, storedValues.length);
    }

    protected final void restoreValues() {
        // swap the arrays; restoreParameterValues() guarantees store ran first
        double[] temp = storedValues;
        storedValues = values;
        values = temp;
    }

    /**
     * Nothing to do: accepting simply keeps the current values.
     */
    protected final void acceptValues() {
    }

    protected final void adoptValues(Parameter source) {
        // todo bug ? bounds not adopted?

        if (getDimension() != source.getDimension()) {
            throw new RuntimeException("The two parameters don't have the same number of dimensions");
        }

        for (int i = 0, n = getDimension(); i < n; i++) {
            values[i] = source.getParameterValue(i);
        }
    }

    private double[] values;

    private double[] storedValues;

    private IntersectionBounds bounds = null;

    public void addBounds(double lower, double upper) {
        addBounds(new DefaultBounds(upper, lower, getDimension()));
    }
}
/**
 * Simple bounds with a fixed lower and upper limit per dimension.
 */
class DefaultBounds implements Bounds<Double> {

    /**
     * Creates bounds with identical limits in every dimension.
     *
     * @param upper     the upper limit used for all dimensions
     * @param lower     the lower limit used for all dimensions
     * @param dimension the number of dimensions
     */
    public DefaultBounds(double upper, double lower, int dimension) {
        this.uppers = new double[dimension];
        this.lowers = new double[dimension];
        for (int i = 0; i < dimension; i++) {
            uppers[i] = upper;
            lowers[i] = lower;
        }
    }

    /**
     * Creates bounds from explicit per-dimension limit arrays.
     * NOTE(review): the arrays are stored by reference, not copied - callers
     * must not mutate them after construction.
     *
     * @param uppers per-dimension upper limits
     * @param lowers per-dimension lower limits
     * @throws IllegalArgumentException if the arrays differ in length
     */
    public DefaultBounds(double[] uppers, double[] lowers) {
        if (uppers.length != lowers.length) {
            throw new IllegalArgumentException("upper and lower limits must be defined on the same number of dimensions.");
        }
        this.uppers = uppers;
        this.lowers = lowers;
    }

    public Double getUpperLimit(int i) {
        return uppers[i];
    }

    public Double getLowerLimit(int i) {
        return lowers[i];
    }

    public int getBoundsDimension() {
        return uppers.length;
    }

    private final double[] uppers, lowers;
}
}
|
package edu.kit.informatik;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import edu.kit.informatik.literatur_system.AuthorNames;
import edu.kit.informatik.literatur_system.ConferenceArticle;
import edu.kit.informatik.literatur_system.JournalArticle;
/**
* Various utility functions
* @author JoseNote
* @version %I%, %G%
*/
public class Utilities {

    private Utilities() {
        // static utility class; not instantiable
    }

    /**
     * Intersects the given collections using a custom collector.
     *
     * @param <T>         the element type
     * @param collections the collections to intersect
     * @return the elements present in every given collection
     *         (empty when no collections are given)
     */
    @SafeVarargs
    public static <T> Collection<T> intersectCustomCollector(Collection<T>... collections) {
        return Arrays.stream(collections)
                .collect(intersecting());
    }

    /**
     * A collector that accumulates the intersection of all collected
     * collections; finishes to an empty set when nothing was collected.
     */
    private static <T, S extends Collection<T>> Collector<S, ?, Set<T>> intersecting() {
        class Acc {
            Set<T> result;

            private void accept(S s) {
                if (result == null)
                    result = new HashSet<>(s);
                else result.retainAll(s);
            }

            private Acc combine(Acc other) {
                if (result == null)
                    return other;
                if (other.result != null)
                    result.retainAll(other.result);
                return this;
            }
        }
        return Collector.of(Acc::new, Acc::accept, Acc::combine,
                acc -> acc.result == null ? Collections.emptySet() : acc.result,
                Collector.Characteristics.UNORDERED);
    }

    /**
     * Intersects the given collections by folding with retainAll.
     *
     * @param <T>         the element type
     * @param collections the collections to intersect
     * @return the elements present in every given collection
     *         (empty when no collections are given)
     */
    @SafeVarargs
    public static <T> Collection<T> intersectMultipleRetain(Collection<T>... collections) {
        return Arrays.stream(collections)
                .reduce((a, b) -> {
                    Set<T> c = new HashSet<>(a);
                    c.retainAll(b);
                    return c;
                }).orElseGet(HashSet::new);
    }

    /**
     * Intersects a collection of collections, scanning only the smallest one
     * and probing the others with contains().
     *
     * @param <T>         the element type
     * @param collections the collections to intersect
     * @return the elements present in every given collection
     *         (empty when no collections are given)
     */
    public static <T> Set<T> intersectCollection(Collection<? extends Collection<T>> collections) {
        if (collections.isEmpty())
            return Collections.emptySet();

        Collection<T> smallest
                = Collections.min(collections, Comparator.comparingInt(Collection::size));

        return smallest.stream().distinct()
                .filter(t -> collections.stream().allMatch(c -> c == smallest || c.contains(t)))
                .collect(Collectors.toSet());
    }

    /**
     * Concatenates multiple generic collections into one list.
     * Doesn't remove repeated elements.
     *
     * @param <T>         the element type
     * @param collections the collections to concatenate
     * @return a list with all elements in encounter order
     */
    @SafeVarargs
    public static <T> Collection<T> unify(
            Collection<T>... collections) {
        return Arrays.stream(collections)
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
    }

    /**
     * Concatenates multiple generic collections into one list,
     * dropping duplicate elements (first occurrence wins).
     *
     * @param <T>         the element type
     * @param collections the collections to concatenate
     * @return a duplicate-free list of all elements
     */
    @SafeVarargs
    public static <T> Collection<T> unifyNoRepetition(
            Collection<T>... collections) {
        return Arrays.stream(collections)
                .flatMap(Collection::stream)
                .distinct()
                .collect(Collectors.toList());
    }

    /**
     * Joins multiple objects into a space separated string.
     *
     * @param values the values to join
     * @return the space separated string form of the values
     */
    public static String listing(
            final Object... values) {
        return Stream.of(values).map(String::valueOf).collect(Collectors.joining(" "));
    }

    /**
     * Builds a "No such <Type>: args..." exception for a failed lookup.
     *
     * @param type the entity type that was looked up
     * @param args the lookup arguments
     * @return the exception to throw
     */
    public static IllegalArgumentException noSuch(
            final Class<?> type, final Object... args) {
        //TODO improve message
        return new IllegalArgumentException(Stream.of(args).map(String::valueOf)
                .collect(Collectors.joining(", ", "No such " + type.getSimpleName() + ": ", "")));
    }

    /**
     * Builds an "exist already <Type>: args..." exception for a duplicate.
     *
     * @param type the entity type that already exists
     * @param args the identifying arguments
     * @return the exception to throw
     */
    public static IllegalArgumentException alreadyExist(
            final Class<?> type, final Object... args) {
        //TODO improve message
        return new IllegalArgumentException(Stream.of(args).map(String::valueOf)
                .collect(Collectors.joining(", ", "exist already " + type.getSimpleName() + ": ", "")));
    }

    /**
     * Formats a journal article in simplified IEEE style.
     * BUG FIX: the format strings in these methods used bare "%1$"-style
     * specifiers with no conversion character, which makes String.format
     * throw UnknownFormatConversionException at runtime; each argument index
     * now carries the 's' conversion.
     *
     * @param index    the citation index
     * @param jArticle the article to format
     * @return the formatted citation
     */
    public static String formatToIEEESimplified(final int index, final JournalArticle jArticle) {
        return String.format(
                "[%1$s] %2$s, \"%3$s,\" %4$s, %5$s.",
                index,
                formatToIEEESimplified(jArticle.getAuthors()),
                jArticle.getArticleTitle(),
                jArticle.getJournalTitle(),
                jArticle.getPublicationYear());
    }

    /**
     * Formats a conference article in simplified IEEE style.
     *
     * @param index    the citation index
     * @param cArticle the article to format
     * @return the formatted citation
     */
    public static String formatToIEEESimplified(final int index, final ConferenceArticle cArticle) {
        return String.format(
                "[%1$s] %2$s, \"%3$s,\" in Proceedings of %4$s, %5$s, %6$s.",
                index,
                formatToIEEESimplified(cArticle.getAuthors()),
                cArticle.getArticleTitle(),
                cArticle.getConferenceSeriesName(),
                cArticle.getConferenceLocation(),
                cArticle.getConferenceYear());
    }

    // Formats the author list for the IEEE style.
    // NOTE(review): still unimplemented and returns null, so the public
    // formatters above will print "null" for the author list.
    private static String formatToIEEESimplified(final Collection<AuthorNames> authors) {
        //TODO implement
        return null;
    }

    /**
     * Formats a journal article in simplified Chicago style.
     *
     * @param jArticle the article to format
     * @return the formatted citation
     */
    public static String formatToChicagoSimplified(final JournalArticle jArticle) {
        return String.format(
                "(%1$s, %2$s) %3$s. \"%4$s.\" %5$s (%6$s).",
                jArticle.firstAuthorLastName(),
                jArticle.getPublicationYear(),
                formatToChicagoSimplified(jArticle.getAuthors()),
                jArticle.getArticleTitle(),
                jArticle.getJournalTitle(),
                jArticle.getPublicationYear());
    }

    /**
     * Formats a conference article in simplified Chicago style.
     *
     * @param cArticle the article to format
     * @return the formatted citation
     */
    public static String formatToChicagoSimplified(final ConferenceArticle cArticle) {
        return String.format(
                "(%1$s, %2$s) %3$s. \"%4$s.\" Paper presented at %5$s, %6$s, %7$s.",
                cArticle.firstAuthorLastName(),
                cArticle.getPublicationYear(),
                formatToChicagoSimplified(cArticle.getAuthors()),
                cArticle.getArticleTitle(),
                cArticle.getConferenceSeriesName(),
                cArticle.getConferenceYear(),
                cArticle.getConferenceLocation());
    }

    // Formats the author list for the Chicago style.
    // NOTE(review): still unimplemented and returns null, so the public
    // formatters above will print "null" for the author list.
    private static String formatToChicagoSimplified(final Collection<AuthorNames> authors) {
        //TODO implement
        return null;
    }
}
|
package entity;
import java.awt.Color;
import java.awt.Graphics2D;
import main.GamePanel;
import map.Map;
import tile.Tile;
import util.EnumSide;
import util.Resources;
import util.Texture;
public class EntityPortalHoriz_Red extends Entity implements IEntityPortal_Red{
    // The linked blue portal that this red portal pairs with.
    private IEntityPortal_Blue otherportal;
    // Which side this portal faces.
    private EnumSide dir;
    // Recolored copy of the sprite, drawn by drawOtherColor().
    protected Texture otherimage;

    public EntityPortalHoriz_Red(){
        if (GamePanel.debug)
            System.out.println("Creating new horizontal red portal!");
        image = Resources.getEntity("PortalHoriz_Red");
        // Builds the alternate-color sprite by replacing the color sampled at
        // the top-left pixel (0,0) of the texture.
        // NOTE(review): 'green' is not declared in this class - presumably a
        // color constant inherited from Entity or a static import; confirm.
        otherimage = Resources.getEntity("PortalHoriz_Red").replaceColors(new Color(image.getRGB(0,0)),green );
    }

    public void draw(Graphics2D g){
        super.draw(g);
    }

    // Draws the portal using the recolored sprite, temporarily swapping the
    // texture so the inherited draw() can be reused.
    public void drawOtherColor(Graphics2D g){
        Texture temp = image;
        image = otherimage;
        super.draw(g);
        image = temp;
    }

    @Override
    public void update() {
        // Adjusts the portal's vertical position relative to map tiles.
        int x = getX();
        int y = getY();
        Tile t= map.getTile(Map.pixelsToTiles(y), Map.pixelsToTiles(x));
        if(t != null){
            // Currently overlapping a tile: probe 16px lower.
            y +=16;
            t= map.getTile(Map.pixelsToTiles(y), Map.pixelsToTiles(x));
            if(t != null){
                // Still on a tile: back up 18px.
                // NOTE(review): the asymmetry (down 16, up 18) leaves y two
                // pixels above the start - confirm this is intentional and
                // not an off-by-two.
                y-=18;
            }
        }
        setY(y);
    }

    public boolean isHorizontal(){return true;}
    public void setOtherPortal(IEntityPortal other){ otherportal = (IEntityPortal_Blue) other; }
    public IEntityPortal getOtherPortal(){return otherportal;}
    public void setDir(EnumSide dir){this.dir = dir;}
    public EnumSide getDir(){return dir;}
}
|
package org.voltdb.iv2;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Deque;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import org.voltcore.logging.Level;
import org.voltcore.logging.VoltLogger;
import org.voltcore.messaging.TransactionInfoBaseMessage;
import org.voltcore.utils.CoreUtils;
import org.voltcore.utils.DBBPool;
import org.voltcore.utils.EstTime;
import org.voltcore.utils.Pair;
import org.voltdb.BackendTarget;
import org.voltdb.CatalogContext;
import org.voltdb.CatalogSpecificPlanner;
import org.voltdb.DependencyPair;
import org.voltdb.HsqlBackend;
import org.voltdb.IndexStats;
import org.voltdb.LoadedProcedureSet;
import org.voltdb.MemoryStats;
import org.voltdb.ParameterSet;
import org.voltdb.PartitionDRGateway;
import org.voltdb.ProcedureRunner;
import org.voltdb.SiteProcedureConnection;
import org.voltdb.SiteSnapshotConnection;
import org.voltdb.SnapshotDataTarget;
import org.voltdb.SnapshotFormat;
import org.voltdb.SnapshotSiteProcessor;
import org.voltdb.SnapshotTableTask;
import org.voltdb.StartAction;
import org.voltdb.StatsAgent;
import org.voltdb.StatsSelector;
import org.voltdb.SystemProcedureExecutionContext;
import org.voltdb.TableStats;
import org.voltdb.TableStreamType;
import org.voltdb.TheHashinator;
import org.voltdb.TheHashinator.HashinatorConfig;
import org.voltdb.TupleStreamStateInfo;
import org.voltdb.VoltDB;
import org.voltdb.VoltProcedure.VoltAbortException;
import org.voltdb.VoltTable;
import org.voltdb.catalog.Catalog;
import org.voltdb.catalog.CatalogMap;
import org.voltdb.catalog.Cluster;
import org.voltdb.catalog.Database;
import org.voltdb.catalog.Procedure;
import org.voltdb.catalog.Table;
import org.voltdb.dtxn.SiteTracker;
import org.voltdb.dtxn.TransactionState;
import org.voltdb.dtxn.UndoAction;
import org.voltdb.exceptions.EEException;
import org.voltdb.jni.ExecutionEngine;
import org.voltdb.jni.ExecutionEngine.TaskType;
import org.voltdb.jni.ExecutionEngineIPC;
import org.voltdb.jni.ExecutionEngineJNI;
import org.voltdb.jni.MockExecutionEngine;
import org.voltdb.messaging.CompleteTransactionMessage;
import org.voltdb.messaging.FragmentTaskMessage;
import org.voltdb.messaging.Iv2InitiateTaskMessage;
import org.voltdb.rejoin.TaskLog;
import org.voltdb.sysprocs.SysProcFragmentId;
import org.voltdb.utils.CatalogUtil;
import org.voltdb.utils.CompressionService;
import org.voltdb.utils.LogKeys;
import org.voltdb.utils.MinimumRatioMaintainer;
import vanilla.java.affinity.impl.PosixJNAAffinity;
import com.google_voltpatches.common.base.Preconditions;
public class Site implements Runnable, SiteProcedureConnection, SiteSnapshotConnection
{
private static final VoltLogger hostLog = new VoltLogger("HOST");

// Fraction used to throttle task-log replay during rejoin; overridable via
// the TASKLOG_REPLAY_RATIO system property (defaults to 0.6).
private static final double m_taskLogReplayRatio =
    Double.valueOf(System.getProperty("TASKLOG_REPLAY_RATIO", "0.6"));

// Set to false to trigger shutdown of the site's run loop.
volatile boolean m_shouldContinue = true;

// HSId of this site's initiator.
final long m_siteId;

final int m_snapshotPriority;

// Partition count is important on SPIs, MPI doesn't use it.
int m_numberOfPartitions;

// What type of EE is controlled
final BackendTarget m_backend;

// Rejoin state machine: running normally, rejoining, or replaying the
// rejoin task log.
private final static int kStateRunning = 0;
private final static int kStateRejoining = 1;
private final static int kStateReplayingRejoin = 2;
private int m_rejoinState;
private final TaskLog m_rejoinTaskLog;
private JoinProducerBase.JoinCompletionAction m_replayCompletionAction;

// Enumerate execution sites by host (monotonically increasing per-JVM index).
private static final AtomicInteger siteIndexCounter = new AtomicInteger(0);
private final int m_siteIndex = siteIndexCounter.getAndIncrement();

// Manages pending tasks.
final SiteTaskerQueue m_scheduler;

/*
 * There is really no legit reason to touch the initiator mailbox from the site,
 * but it turns out to be necessary at startup when restoring a snapshot. The snapshot
 * has the transaction id for the partition that it must continue from and it has to be
 * set at all replicas of the partition.
 */
final InitiatorMailbox m_initiatorMailbox;

// Almighty execution engine and its HSQL sidekick
ExecutionEngine m_ee;
HsqlBackend m_hsql;

// Stats
final TableStats m_tableStats;
final IndexStats m_indexStats;
final MemoryStats m_memStats;

// Each execution site manages snapshot using a SnapshotSiteProcessor
private SnapshotSiteProcessor m_snapshotter;

// Current catalog
volatile CatalogContext m_context;

// Currently available procedure set
volatile LoadedProcedureSet m_loadedProcedures;

// Cache the DR gateway here so that we can pass it to tasks as they are reconstructed from
// the task log
private final PartitionDRGateway m_drGateway;
private final PartitionDRGateway m_mpDrGateway;

// Current topology
int m_partitionId;
private final String m_coreBindIds;

// Need temporary access to some startup parameters in order to
// initialize EEs in the right thread.
private static class StartupConfig
{
    // Catalog and timestamp captured at construction, consumed on the site thread.
    final Catalog m_serializableCatalog;
    final long m_timestamp;

    StartupConfig(final Catalog catalog, final long timestamp)
    {
        m_serializableCatalog = catalog;
        m_timestamp = timestamp;
    }
}
private StartupConfig m_startupConfig = null;

// Undo token state for the corresponding EE.
public final static long kInvalidUndoToken = -1L;
long latestUndoToken = 0L;
// Transaction id for which latestUndoToken was last advanced.
long latestUndoTxnId = Long.MIN_VALUE;
/**
 * Returns the undo token for the given transaction: the current token is
 * reused for repeated requests within one transaction, and a fresh token is
 * handed out only when a new transaction id is seen.
 *
 * @param txnId the transaction requesting an undo token
 * @return the undo token to use for that transaction
 */
private long getNextUndoToken(long txnId)
{
    if (txnId == latestUndoTxnId) {
        return latestUndoToken;
    }
    latestUndoTxnId = txnId;
    return ++latestUndoToken;
}
/*
 * Increment the undo token blindly to work around
 * issues using a single token per transaction
 * See ENG-5242
 */
private long getNextUndoTokenBroken() {
    latestUndoTxnId = m_currentTxnId;
    return ++latestUndoToken;
}

@Override
public long getLatestUndoToken()
{
    return latestUndoToken;
}

// Advanced in complete transaction.
long m_lastCommittedSpHandle = 0;
long m_spHandleForSnapshotDigest = 0;
long m_currentTxnId = Long.MIN_VALUE;
// Time of the last executed transaction; consulted by the snapshot idle
// predicate in initialize().
long m_lastTxnTime = System.currentTimeMillis();

/*
 * The version of the hashinator currently in use at the site will be consistent
 * across the node because balance partitions runs everywhere and all sites update.
 *
 * There is a corner case with live rejoin where sites replay their log and some sites
 * can pull ahead and update the global hashinator to ones further ahead causing transactions
 * to not be applied correctly during replay at the other sites. To avoid this each site
 * maintains a reference to it's own hashinator (which will be shared if possible).
 *
 * When two partition transactions come online they will diverge for pretty much the entire rebalance,
 * but will converge at the end when the final hash function update is issued everywhere
 */
TheHashinator m_hashinator;

// Hands this site out as a procedure connection.
SiteProcedureConnection getSiteProcedureConnection()
{
    return this;
}
/**
 * SystemProcedures are "friends" with ExecutionSites and granted
 * access to internal state via m_systemProcedureContext.
 * Mostly a thin view over this Site's fields; mutating calls delegate
 * back to Site.this.
 */
SystemProcedureExecutionContext m_sysprocContext = new SystemProcedureExecutionContext() {
    @Override
    public Database getDatabase() {
        return m_context.database;
    }

    @Override
    public Cluster getCluster() {
        return m_context.cluster;
    }

    @Override
    public long getSpHandleForSnapshotDigest() {
        return m_spHandleForSnapshotDigest;
    }

    @Override
    public long getSiteId() {
        return m_siteId;
    }

    /*
     * Expensive to compute, memoize it
     */
    private Boolean m_isLowestSiteId = null;

    @Override
    public boolean isLowestSiteId()
    {
        if (m_isLowestSiteId != null) {
            return m_isLowestSiteId;
        } else {
            // FUTURE: should pass this status in at construction.
            long lowestSiteId = VoltDB.instance().getSiteTrackerForSnapshot().getLowestSiteForHost(getHostId());
            m_isLowestSiteId = m_siteId == lowestSiteId;
            return m_isLowestSiteId;
        }
    }

    @Override
    public int getHostId() {
        // Host id is encoded in the HSId.
        return CoreUtils.getHostIdFromHSId(m_siteId);
    }

    @Override
    public int getPartitionId() {
        return m_partitionId;
    }

    @Override
    public long getCatalogCRC() {
        return m_context.getCatalogCRC();
    }

    @Override
    public int getCatalogVersion() {
        return m_context.catalogVersion;
    }

    @Override
    public byte[] getCatalogHash() {
        return m_context.getCatalogHash();
    }

    @Override
    public byte[] getDeploymentHash() {
        return m_context.deploymentHash;
    }

    @Override
    public SiteTracker getSiteTrackerForSnapshot() {
        return VoltDB.instance().getSiteTrackerForSnapshot();
    }

    @Override
    public int getNumberOfPartitions() {
        return m_numberOfPartitions;
    }

    @Override
    public void setNumberOfPartitions(int partitionCount) {
        Site.this.setNumberOfPartitions(partitionCount);
    }

    @Override
    public SiteProcedureConnection getSiteProcedureConnection()
    {
        return Site.this;
    }

    @Override
    public SiteSnapshotConnection getSiteSnapshotConnection()
    {
        return Site.this;
    }

    @Override
    public void updateBackendLogLevels() {
        Site.this.updateBackendLogLevels();
    }

    @Override
    public boolean updateCatalog(String diffCmds, CatalogContext context,
            CatalogSpecificPlanner csp, boolean requiresSnapshotIsolation)
    {
        // Delegates with the trailing boolean hard-coded to false.
        // NOTE(review): the meaning of that flag is defined in
        // Site.updateCatalog, not visible here - confirm before changing.
        return Site.this.updateCatalog(diffCmds, context, csp, requiresSnapshotIsolation, false);
    }

    @Override
    public TheHashinator getCurrentHashinator()
    {
        return m_hashinator;
    }

    @Override
    public void updateHashinator(TheHashinator hashinator)
    {
        Site.this.updateHashinator(hashinator);
    }

    @Override
    public boolean activateTableStream(final int tableId, TableStreamType type, boolean undo, byte[] predicates)
    {
        // When undo is not requested, Long.MAX_VALUE is passed instead of a
        // real undo token.
        return m_ee.activateTableStream(tableId, type, undo ? getNextUndoToken(m_currentTxnId) : Long.MAX_VALUE, predicates);
    }

    @Override
    public Pair<Long, int[]> tableStreamSerializeMore(int tableId, TableStreamType type,
                                                      List<DBBPool.BBContainer> outputBuffers)
    {
        return m_ee.tableStreamSerializeMore(tableId, type, outputBuffers);
    }

    @Override
    public void forceAllDRNodeBuffersToDisk(final boolean nofsync)
    {
        // The MP gateway is optional (null on sites without one).
        m_drGateway.forceAllDRNodeBuffersToDisk(nofsync);
        if (m_mpDrGateway != null) {
            m_mpDrGateway.forceAllDRNodeBuffersToDisk(nofsync);
        }
    }

    @Override
    public Procedure ensureDefaultProcLoaded(String procName) {
        ProcedureRunner runner = Site.this.m_loadedProcedures.getProcByName(procName);
        return runner.getCatalogProcedure();
    }
};
/**
 * Create a new execution site and the corresponding EE.
 * Only stores configuration here; the EE itself is created later on the
 * site thread by initialize().
 */
public Site(
        SiteTaskerQueue scheduler,
        long siteId,
        BackendTarget backend,
        CatalogContext context,
        int partitionId,
        int numPartitions,
        StartAction startAction,
        int snapshotPriority,
        InitiatorMailbox initiatorMailbox,
        StatsAgent agent,
        MemoryStats memStats,
        String coreBindIds,
        TaskLog rejoinTaskLog,
        PartitionDRGateway drGateway,
        PartitionDRGateway mpDrGateway)
{
    m_siteId = siteId;
    m_context = context;
    m_partitionId = partitionId;
    m_numberOfPartitions = numPartitions;
    m_scheduler = scheduler;
    m_backend = backend;
    // Joining start actions begin in the rejoining state.
    m_rejoinState = startAction.doesJoin() ? kStateRejoining : kStateRunning;
    m_snapshotPriority = snapshotPriority;
    // need this later when running in the final thread.
    m_startupConfig = new StartupConfig(context.catalog, context.m_uniqueId);
    m_lastCommittedSpHandle = TxnEgo.makeZero(partitionId).getTxnId();
    m_spHandleForSnapshotDigest = m_lastCommittedSpHandle;
    m_currentTxnId = Long.MIN_VALUE;
    m_initiatorMailbox = initiatorMailbox;
    m_coreBindIds = coreBindIds;
    m_rejoinTaskLog = rejoinTaskLog;
    m_drGateway = drGateway;
    m_mpDrGateway = mpDrGateway;
    m_hashinator = TheHashinator.getCurrentHashinator();

    if (agent != null) {
        m_tableStats = new TableStats(m_siteId);
        agent.registerStatsSource(StatsSelector.TABLE,
                                  m_siteId,
                                  m_tableStats);
        m_indexStats = new IndexStats(m_siteId);
        agent.registerStatsSource(StatsSelector.INDEX,
                                  m_siteId,
                                  m_indexStats);
        m_memStats = memStats;
    } else {
        // MPI doesn't need to track these stats
        m_tableStats = null;
        m_indexStats = null;
        m_memStats = null;
    }
}
/** Update the loaded procedures (called on catalog load/update). */
void setLoadedProcedures(LoadedProcedureSet loadedProcedure)
{
    m_loadedProcedures = loadedProcedure;
}
/**
 * Thread specific initialization: create the backend (mock, HSQL, or real
 * native EE) and the snapshot processor. Must run on the site thread.
 */
void initialize()
{
    if (m_backend == BackendTarget.NONE) {
        m_hsql = null;
        m_ee = new MockExecutionEngine();
    }
    else if (m_backend == BackendTarget.HSQLDB_BACKEND) {
        // HSQL handles SQL; the EE is mocked out.
        m_hsql = HsqlBackend.initializeHSQLBackend(m_siteId,
                m_context);
        m_ee = new MockExecutionEngine();
    }
    else {
        m_hsql = null;
        m_ee = initializeEE();
    }
    m_snapshotter = new SnapshotSiteProcessor(m_scheduler,
            m_snapshotPriority,
            new SnapshotSiteProcessor.IdlePredicate() {
                @Override
                public boolean idle(long now) {
                    // Idle means no transaction ran in the last 5 ms.
                    return (now - 5) > m_lastTxnTime;
                }
            });
}
/**
 * Create a native VoltDB execution engine (JNI in-process, or IPC for the
 * Valgrind/debug targets), load the startup catalog into it, and configure
 * the query timeout. Crashes the node on any failure.
 *
 * @return the initialized engine, or null only if crash-on-error returns
 */
ExecutionEngine initializeEE()
{
    String hostname = CoreUtils.getHostnameOrAddress();
    HashinatorConfig hashinatorConfig = TheHashinator.getCurrentConfig();
    ExecutionEngine eeTemp = null;
    try {
        if (m_backend == BackendTarget.NATIVE_EE_JNI) {
            eeTemp =
                new ExecutionEngineJNI(
                        m_context.cluster.getRelativeIndex(),
                        m_siteId,
                        m_partitionId,
                        CoreUtils.getHostIdFromHSId(m_siteId),
                        hostname,
                        m_context.cluster.getDeployment().get("deployment").
                        getSystemsettings().get("systemsettings").getTemptablemaxsize(),
                        hashinatorConfig,
                        m_mpDrGateway != null);
        }
        else {
            // set up the EE over IPC
            eeTemp =
                new ExecutionEngineIPC(
                        m_context.cluster.getRelativeIndex(),
                        m_siteId,
                        m_partitionId,
                        CoreUtils.getHostIdFromHSId(m_siteId),
                        hostname,
                        m_context.cluster.getDeployment().get("deployment").
                        getSystemsettings().get("systemsettings").getTemptablemaxsize(),
                        m_backend,
                        VoltDB.instance().getConfig().m_ipcPort,
                        hashinatorConfig,
                        m_mpDrGateway != null);
        }
        // Push the serialized startup catalog and the configured query
        // timeout into the freshly created engine.
        eeTemp.loadCatalog(m_startupConfig.m_timestamp, m_startupConfig.m_serializableCatalog.serialize());
        eeTemp.setTimeoutLatency(m_context.cluster.getDeployment().get("deployment").
                getSystemsettings().get("systemsettings").getQuerytimeout());
    }
    // just print error info and bail if we run into an error here
    catch (final Exception ex) {
        hostLog.l7dlog( Level.FATAL, LogKeys.host_ExecutionSite_FailedConstruction.name(),
                        new Object[] { m_siteId, m_siteIndex }, ex);
        VoltDB.crashLocalVoltDB(ex.getMessage(), true, ex);
    }
    return eeTemp;
}
/**
 * Site thread main loop. Drives one of three states:
 * kStateRunning (normal: block on the task queue and execute),
 * kStateReplayingRejoin (interleave queued tasks with task-log replay,
 * keeping the unrestricted:restricted ratio via MinimumRatioMaintainer),
 * or the initial rejoining state (log tasks for later replay).
 * Any Throwable crashes the local node; shutdown always runs on exit.
 */
@Override
public void run()
{
    Thread.currentThread().setName("Iv2ExecutionSite: " + CoreUtils.hsIdToString(m_siteId));
    if (m_coreBindIds != null) {
        PosixJNAAffinity.INSTANCE.setAffinity(m_coreBindIds);
    }
    initialize();
    m_startupConfig = null; // release the serializableCatalog.
    //Maintain a minimum ratio of task log (unrestricted) to live (restricted) transactions
    final MinimumRatioMaintainer mrm = new MinimumRatioMaintainer(m_taskLogReplayRatio);
    try {
        while (m_shouldContinue) {
            if (m_rejoinState == kStateRunning) {
                // Normal operation blocks the site thread on the sitetasker queue.
                SiteTasker task = m_scheduler.take();
                if (task instanceof TransactionTask) {
                    m_currentTxnId = ((TransactionTask)task).getTxnId();
                    m_lastTxnTime = EstTime.currentTimeMillis();
                }
                task.run(getSiteProcedureConnection());
            } else if (m_rejoinState == kStateReplayingRejoin) {
                // Rejoin operation poll and try to do some catchup work. Tasks
                // are responsible for logging any rejoin work they might have.
                SiteTasker task = m_scheduler.poll();
                boolean didWork = false;
                if (task != null) {
                    didWork = true;
                    //If the task log is empty, free to execute the task
                    //If the mrm says we can do a restricted task, go do it
                    //Otherwise spin doing unrestricted tasks until we can bail out
                    //and do the restricted task that was polled
                    while (!m_rejoinTaskLog.isEmpty() && !mrm.canDoRestricted()) {
                        replayFromTaskLog(mrm);
                    }
                    mrm.didRestricted();
                    // replayFromTaskLog may have completed replay and flipped
                    // the state to kStateRunning; dispatch accordingly.
                    if (m_rejoinState == kStateRunning) {
                        task.run(getSiteProcedureConnection());
                    } else {
                        task.runForRejoin(getSiteProcedureConnection(), m_rejoinTaskLog);
                    }
                } else {
                    //If there are no tasks, do task log work
                    didWork |= replayFromTaskLog(mrm);
                }
                if (!didWork) Thread.yield();
            } else {
                // kStateRejoining: just log every task for later replay.
                SiteTasker task = m_scheduler.take();
                task.runForRejoin(getSiteProcedureConnection(), m_rejoinTaskLog);
            }
        }
    }
    catch (OutOfMemoryError e)
    {
        // Even though OOM should be caught by the Throwable section below,
        // it sadly needs to be handled separately. The goal here is to make
        // sure VoltDB crashes.
        String errmsg = "Site: " + org.voltcore.utils.CoreUtils.hsIdToString(m_siteId) +
            " ran out of Java memory. " + "This node will shut down.";
        VoltDB.crashLocalVoltDB(errmsg, true, e);
    }
    catch (Throwable t)
    {
        String errmsg = "Site: " + org.voltcore.utils.CoreUtils.hsIdToString(m_siteId) +
            " encountered an " + "unexpected error and will die, taking this VoltDB node down.";
        VoltDB.crashLocalVoltDB(errmsg, true, t);
    }
    try {
        shutdown();
    } finally {
        // Release thread-local compression buffers regardless of shutdown outcome.
        CompressionService.releaseThreadLocal();
    }
}
// Tracks the single in-flight MP transaction being replayed from the task
// log; null when no MP txn is open.
ParticipantTransactionState global_replay_mpTxn = null;

/**
 * Replay at most one message from the rejoin task log.
 * SP initiations and MP fragments are wrapped in the matching task type and
 * run (unless filtered); completes close out {@link #global_replay_mpTxn}.
 * When the log drains with no MP txn open, transitions to kStateRunning.
 *
 * @param mrm ratio maintainer, notified of each unrestricted unit of work
 * @return true if a message was consumed from the log
 * @throws IOException on task-log read failure
 */
boolean replayFromTaskLog(MinimumRatioMaintainer mrm) throws IOException
{
    // not yet time to catch-up.
    if (m_rejoinState != kStateReplayingRejoin) {
        return false;
    }
    TransactionInfoBaseMessage tibm = m_rejoinTaskLog.getNextMessage();
    if (tibm != null) {
        mrm.didUnrestricted();
        if (tibm instanceof Iv2InitiateTaskMessage) {
            Iv2InitiateTaskMessage m = (Iv2InitiateTaskMessage)tibm;
            SpProcedureTask t = new SpProcedureTask(
                    m_initiatorMailbox, m.getStoredProcedureName(),
                    null, m, m_drGateway);
            if (!filter(tibm)) {
                m_currentTxnId = t.getTxnId();
                m_lastTxnTime = EstTime.currentTimeMillis();
                t.runFromTaskLog(this);
            }
        }
        else if (tibm instanceof FragmentTaskMessage) {
            FragmentTaskMessage m = (FragmentTaskMessage)tibm;
            if (global_replay_mpTxn == null) {
                global_replay_mpTxn = new ParticipantTransactionState(m.getTxnId(), m);
            }
            else if (global_replay_mpTxn.txnId != m.getTxnId()) {
                // A new MP txn may not start before the previous completes.
                VoltDB.crashLocalVoltDB("Started a MP transaction during replay before completing " +
                        " open transaction.", false, null);
            }
            TransactionTask t;
            if (m.isSysProcTask()) {
                t = new SysprocFragmentTask(m_initiatorMailbox, m, global_replay_mpTxn);
            } else {
                t = new FragmentTask(m_initiatorMailbox, m, global_replay_mpTxn);
            }
            if (!filter(tibm)) {
                m_currentTxnId = t.getTxnId();
                m_lastTxnTime = EstTime.currentTimeMillis();
                t.runFromTaskLog(this);
            }
        }
        else if (tibm instanceof CompleteTransactionMessage) {
            // Needs improvement: completes for sysprocs aren't filterable as sysprocs.
            // Only complete transactions that are open...
            if (global_replay_mpTxn != null) {
                CompleteTransactionMessage m = (CompleteTransactionMessage)tibm;
                CompleteTransactionTask t = new CompleteTransactionTask(global_replay_mpTxn,
                        null, m, m_drGateway);
                if (!m.isRestart()) {
                    // A restart keeps the MP txn open for more fragments.
                    global_replay_mpTxn = null;
                }
                if (!filter(tibm)) {
                    t.runFromTaskLog(this);
                }
            }
        }
        else {
            VoltDB.crashLocalVoltDB("Can not replay message type " +
                    tibm + " during live rejoin. Unexpected error.",
                    false, null);
        }
    }
    // exit replay being careful not to exit in the middle of a multi-partititon
    // transaction. The SPScheduler doesn't have a valid transaction state for a
    // partially replayed MP txn and in case of rollback the scheduler's undo token
    // is wrong. Run MP txns fully kStateRejoining or fully kStateRunning.
    if (m_rejoinTaskLog.isEmpty() && global_replay_mpTxn == null) {
        setReplayRejoinComplete();
    }
    return tibm != null;
}
/**
 * Decide whether a logged message should be skipped during live-rejoin
 * replay. Non-durable sysproc fragments and non-durable stored-procedure
 * initiations are filtered out; everything else replays.
 *
 * This mirrors the original ExecutionSite live-rejoin filtering and should
 * eventually be refactored so the message decides for itself. Multi-part
 * AdHoc needs no check because it is an alias and runs as planned.
 *
 * @param tibm candidate message from the rejoin task log
 * @return true to skip the message, false to replay it
 */
static boolean filter(TransactionInfoBaseMessage tibm)
{
    if (tibm instanceof FragmentTaskMessage) {
        FragmentTaskMessage ftm = (FragmentTaskMessage) tibm;
        // Only non-durable sysproc fragments are filtered.
        return ftm.isSysProcTask()
                && !SysProcFragmentId.isDurableFragment(ftm.getPlanHash(0));
    }
    if (tibm instanceof Iv2InitiateTaskMessage) {
        //All durable sysprocs and non-sysprocs should not get filtered.
        Iv2InitiateTaskMessage itm = (Iv2InitiateTaskMessage) tibm;
        return !CatalogUtil.isDurableProc(itm.getStoredProcedureName());
    }
    return false;
}
/** Ask the site thread's run loop to exit; actual teardown happens in shutdown(). */
public void startShutdown()
{
    m_shouldContinue = false;
}
/**
 * Tear down site resources: HSQL backend, the EE, the snapshot processor,
 * and the rejoin task log. Best-effort — per-resource failures are logged
 * and do not abort the remaining steps (except an InterruptedException
 * escaping the early steps, which skips to the outer catch).
 */
void shutdown()
{
    try {
        if (m_hsql != null) {
            HsqlBackend.shutdownInstance();
        }
        if (m_ee != null) {
            m_ee.release();
        }
        if (m_snapshotter != null) {
            try {
                m_snapshotter.shutdown();
            } catch (InterruptedException e) {
                hostLog.warn("Interrupted during shutdown", e);
            }
        }
        if (m_rejoinTaskLog != null) {
            try {
                m_rejoinTaskLog.close();
            } catch (IOException e) {
                hostLog.error("Exception closing rejoin task log", e);
            }
        }
    } catch (InterruptedException e) {
        hostLog.warn("Interrupted shutdown execution site.", e);
    }
}
// SiteSnapshotConnection interface

/** Kick off snapshot tasks on this site's snapshot processor. */
@Override
public void initiateSnapshots(
        SnapshotFormat format,
        Deque<SnapshotTableTask> tasks,
        long txnId,
        Map<String, Map<Integer, Pair<Long,Long>>> exportSequenceNumbers,
        Map<Integer, Pair<Long, Long>> drTupleStreamInfo,
        Map<Integer, Map<Integer, Pair<Long, Long>>> remoteDCLastIds) {
    m_snapshotter.initiateSnapshots(m_sysprocContext, format, tasks, txnId,
                                    exportSequenceNumbers, drTupleStreamInfo,
                                    remoteDCLastIds);
}
/*
 * Do snapshot work exclusively until there is no more. Also blocks
 * until the syncing and closing of snapshot data targets has completed.
 */
@Override
public HashSet<Exception> completeSnapshotWork() throws InterruptedException {
    return m_snapshotter.completeSnapshotWork(m_sysprocContext);
}
// Legacy SiteProcedureConnection needed by ProcedureRunner

/** @return this site's HSId */
@Override
public long getCorrespondingSiteId()
{
    return m_siteId;
}
/** @return the partition this site serves */
@Override
public int getCorrespondingPartitionId()
{
    return m_partitionId;
}
/** @return the host id extracted from this site's HSId */
@Override
public int getCorrespondingHostId()
{
    return CoreUtils.getHostIdFromHSId(m_siteId);
}
/**
 * Resolve a table by cluster/database/table name in the current catalog and
 * delegate to the id-based {@code loadTable} overload.
 *
 * @throws VoltAbortException when the cluster, database, or table is unknown
 */
@Override
public byte[] loadTable(long txnId, long spHandle, long uniqueId, String clusterName, String databaseName,
        String tableName, VoltTable data,
        boolean returnUniqueViolations, boolean shouldDRStream, boolean undo) throws VoltAbortException
{
    final Cluster catalogCluster = m_context.cluster;
    if (catalogCluster == null) {
        throw new VoltAbortException("cluster '" + clusterName + "' does not exist");
    }

    final Database catalogDb = catalogCluster.getDatabases().get(databaseName);
    if (catalogDb == null) {
        throw new VoltAbortException("database '" + databaseName + "' does not exist in cluster " + clusterName);
    }

    final Table catalogTable = catalogDb.getTables().getIgnoreCase(tableName);
    if (catalogTable == null) {
        throw new VoltAbortException("table '" + tableName + "' does not exist in database " + clusterName + "." + databaseName);
    }

    return loadTable(txnId, spHandle, uniqueId, catalogTable.getRelativeIndex(), data,
            returnUniqueViolations, shouldDRStream, undo);
}
/**
 * Bulk-load rows into a table by relative index via the EE.
 * Long.MAX_VALUE as the undo token means "no-op, don't track undo".
 */
@Override
public byte[] loadTable(long txnId, long spHandle, long uniqueId, int tableId,
        VoltTable data, boolean returnUniqueViolations, boolean shouldDRStream,
        boolean undo)
{
    // Long.MAX_VALUE is a no-op don't track undo token
    return m_ee.loadTable(tableId, data, txnId,
            spHandle,
            m_lastCommittedSpHandle,
            uniqueId,
            returnUniqueViolations,
            shouldDRStream,
            undo ? getNextUndoToken(m_currentTxnId) : Long.MAX_VALUE);
}
/** Push the current Java-side EE log levels down into the native engine. */
@Override
public void updateBackendLogLevels()
{
    m_ee.setLogLevels(org.voltdb.jni.EELoggers.getLogLevels());
}
/** Hand control to the transaction state machine, which calls back into this site. */
@Override
public Map<Integer, List<VoltTable>> recursableRun(
        TransactionState currentTxnState)
{
    return currentTxnState.recursableRun(this);
}
/**
 * Advance (never retreat) the spHandle recorded in snapshot digests.
 */
@Override
public void setSpHandleForSnapshotDigest(long spHandle)
{
    // During rejoin, the spHandle is updated even though the site is not executing the tasks. If it's a live
    // rejoin, all logged tasks will be replayed. So the spHandle may go backward and forward again. It should
    // stop at the same point after replay.
    m_spHandleForSnapshotDigest = Math.max(m_spHandleForSnapshotDigest, spHandle);
}
/**
 * Walk the undo log from newest to oldest, either undoing every action
 * (rollback) or releasing it (commit). A null log is a no-op.
 *
 * @param undoLog actions recorded for the quantum, oldest first; may be null
 * @param undo    true to undo, false to release
 */
private static void handleUndoLog(List<UndoAction> undoLog, boolean undo) {
    if (undoLog == null) {
        return;
    }
    // Newest-first traversal: undo must unwind in reverse recording order.
    for (int idx = undoLog.size() - 1; idx >= 0; idx--) {
        final UndoAction action = undoLog.get(idx);
        if (undo) {
            action.undo();
        } else {
            action.release();
        }
    }
}
/**
 * Record the latest committed spHandle and fold it into the snapshot digest
 * watermark. Crashes if the stored handle's partition id ever disagrees
 * with this site's partition (invariant violation).
 */
private void setLastCommittedSpHandle(long spHandle)
{
    if (TxnEgo.getPartitionId(m_lastCommittedSpHandle) != m_partitionId) {
        VoltDB.crashLocalVoltDB("Mismatch SpHandle partitiond id " +
                TxnEgo.getPartitionId(m_lastCommittedSpHandle) + ", " +
                TxnEgo.getPartitionId(spHandle), true, null);
    }
    m_lastCommittedSpHandle = spHandle;
    setSpHandleForSnapshotDigest(m_lastCommittedSpHandle);
}
/**
 * Commit or roll back the undo quantum delimited by beginUndoToken, then
 * apply the Java-side undo log in matching fashion.
 *
 * @param rollback       true to undo, false to commit/release
 * @param beginUndoToken first token of the quantum; kInvalidUndoToken means
 *                       the txn did no work and there is nothing to do
 * @param spHandle       spHandle to record as committed (ignored on rollback)
 * @param undoLog        Java-side undo actions, applied newest-first
 */
@Override
public void truncateUndoLog(boolean rollback, long beginUndoToken, long spHandle, List<UndoAction> undoLog)
{
    // Set the last committed txnId even if there is nothing to undo, as long as the txn is not rolling back.
    if (!rollback) {
        setLastCommittedSpHandle(spHandle);
    }
    //Any new txnid will create a new undo quantum, including the same txnid again
    latestUndoTxnId = Long.MIN_VALUE;
    //If the begin undo token is not set the txn never did any work so there is nothing to undo/release
    if (beginUndoToken == Site.kInvalidUndoToken) return;
    if (rollback) {
        m_ee.undoUndoToken(beginUndoToken);
    }
    else {
        assert(latestUndoToken != Site.kInvalidUndoToken);
        assert(latestUndoToken >= beginUndoToken);
        if (latestUndoToken > beginUndoToken) {
            m_ee.releaseUndoToken(latestUndoToken);
        }
    }
    handleUndoLog(undoLog, rollback);
}
/** Hand incoming fragment dependencies to the EE for the next execution. */
@Override
public void stashWorkUnitDependencies(Map<Integer, List<VoltTable>> dependencies)
{
    m_ee.stashWorkUnitDependencies(dependencies);
}
/** Look up the sysproc runner for the fragment id and execute the fragment. */
@Override
public DependencyPair executeSysProcPlanFragment(
        TransactionState txnState,
        Map<Integer, List<VoltTable>> dependencies, long fragmentId,
        ParameterSet params)
{
    ProcedureRunner runner = m_loadedProcedures.getSysproc(fragmentId);
    return runner.executeSysProcPlanFragment(txnState, dependencies, fragmentId, params);
}
/** @return the HSQL backend, or null when running with a real/mock EE */
@Override
public HsqlBackend getHsqlBackendIfExists()
{
    return m_hsql;
}
/** @return the EE's universal stream offsets for the given export table signature */
@Override
public long[] getUSOForExportTable(String signature)
{
    return m_ee.getUSOForExportTable(signature);
}
/**
 * Query the EE for DR tuple stream state. The reply buffer layout is:
 * partition sequence number (long), partition unique id (long), a flag byte,
 * and — when the flag is non-zero — the replicated stream's sequence number
 * and unique id (two longs).
 */
@Override
public TupleStreamStateInfo getDRTupleStreamStateInfo()
{
    final ByteBuffer reply =
            ByteBuffer.wrap(m_ee.executeTask(TaskType.GET_DR_TUPLESTREAM_STATE, ByteBuffer.allocate(0)));
    final long partitionSequenceNumber = reply.getLong();
    final long partitionUniqueId = reply.getLong();
    // Flag byte: non-zero means replicated-stream state follows.
    if (reply.get() != 0) {
        final long replicatedSequenceNumber = reply.getLong();
        final long replicatedUniqueId = reply.getLong();
        return new TupleStreamStateInfo(partitionSequenceNumber, partitionUniqueId,
                                        replicatedSequenceNumber, replicatedUniqueId);
    }
    return new TupleStreamStateInfo(partitionSequenceNumber, partitionUniqueId);
}
/**
 * Push DR sequence numbers into the EE. Long.MIN_VALUE is the wire sentinel
 * for "not provided"; a call with both arguments null is a no-op.
 */
@Override
public void setDRSequenceNumbers(Long partitionSequenceNumber, Long mpSequenceNumber) {
    if (partitionSequenceNumber == null && mpSequenceNumber == null) return;
    ByteBuffer paramBuffer = m_ee.getParamBufferForExecuteTask(16);
    paramBuffer.putLong(partitionSequenceNumber != null ? partitionSequenceNumber : Long.MIN_VALUE);
    paramBuffer.putLong(mpSequenceNumber != null ? mpSequenceNumber : Long.MIN_VALUE);
    m_ee.executeTask(TaskType.SET_DR_SEQUENCE_NUMBERS, paramBuffer);
}
/** Toggle the EE's internal profiler on/off. */
@Override
public void toggleProfiler(int toggle)
{
    m_ee.toggleProfiler(toggle);
}
/** Periodic tick: let the EE do housekeeping, then refresh cached statistics. */
@Override
public void tick()
{
    long time = System.currentTimeMillis();
    m_ee.tick(time, m_lastCommittedSpHandle);
    statsTick(time);
}
/**
 * Cache the current statistics. Pulls TABLE and INDEX stats from the EE,
 * rolls up per-site memory totals, and publishes the aggregate to the
 * memory stats sink. No-op on the MPI (m_tableStats is null there).
 *
 * @param time wall-clock timestamp to stamp the samples with
 */
private void statsTick(long time)
{
    /*
     * grab the table statistics from ee and put it into the statistics
     * agent.
     */
    if (m_tableStats != null) {
        CatalogMap<Table> tables = m_context.database.getTables();
        int[] tableIds = new int[tables.size()];
        int i = 0;
        for (Table table : tables) {
            tableIds[i++] = table.getRelativeIndex();
        }

        // data to aggregate
        long tupleCount = 0;
        int tupleDataMem = 0;
        int tupleAllocatedMem = 0;
        int indexMem = 0;
        int stringMem = 0;

        // update table stats
        final VoltTable[] s1 =
            m_ee.getStats(StatsSelector.TABLE, tableIds, false, time);
        if ((s1 != null) && (s1.length > 0)) {
            VoltTable stats = s1[0];
            assert(stats != null);

            // rollup the table memory stats for this site
            while (stats.advanceRow()) {
                //Assert column index matches name for ENG-4092
                assert(stats.getColumnName(7).equals("TUPLE_COUNT"));
                tupleCount += stats.getLong(7);
                assert(stats.getColumnName(8).equals("TUPLE_ALLOCATED_MEMORY"));
                tupleAllocatedMem += (int) stats.getLong(8);
                assert(stats.getColumnName(9).equals("TUPLE_DATA_MEMORY"));
                tupleDataMem += (int) stats.getLong(9);
                assert(stats.getColumnName(10).equals("STRING_DATA_MEMORY"));
                stringMem += (int) stats.getLong(10);
            }
            stats.resetRowPosition();

            m_tableStats.setStatsTable(stats);
        }
        else {
            // the EE returned no table stats, which means there are no tables.
            // Need to ensure the cached stats are cleared to reflect that
            m_tableStats.resetStatsTable();
        }

        // update index stats
        final VoltTable[] s2 =
            m_ee.getStats(StatsSelector.INDEX, tableIds, false, time);
        if ((s2 != null) && (s2.length > 0)) {
            VoltTable stats = s2[0];
            assert(stats != null);

            // rollup the index memory stats for this site
            while (stats.advanceRow()) {
                //Assert column index matches name for ENG-4092
                assert(stats.getColumnName(11).equals("MEMORY_ESTIMATE"));
                indexMem += stats.getLong(11);
            }
            stats.resetRowPosition();

            m_indexStats.setStatsTable(stats);
        }
        else {
            // the EE returned no index stats, which means there are no indexes.
            // Need to ensure the cached stats are cleared to reflect that
            m_indexStats.resetStatsTable();
        }

        // update the rolled up memory statistics
        if (m_memStats != null) {
            m_memStats.eeUpdateMemStats(m_siteId,
                                        tupleCount,
                                        tupleDataMem,
                                        tupleAllocatedMem,
                                        indexMem,
                                        stringMem,
                                        m_ee.getThreadLocalPoolAllocations());
        }
    }
}
/** Flush pending EE work (e.g. export data) up to the last committed spHandle. */
@Override
public void quiesce()
{
    m_ee.quiesce(m_lastCommittedSpHandle);
}
/** Forward an export ack/sync action to the EE. */
@Override
public void exportAction(boolean syncAction,
                         long ackOffset,
                         Long sequenceNumber,
                         Integer partitionId, String tableSignature)
{
    m_ee.exportAction(syncAction, ackOffset, sequenceNumber,
                      partitionId, tableSignature);
}
/** Fetch raw statistics tables directly from the EE. */
@Override
public VoltTable[] getStats(StatsSelector selector, int[] locators,
                            boolean interval, Long now)
{
    return m_ee.getStats(selector, locators, interval, now);
}
/** Perform one non-exclusive unit of snapshot work, if any is pending. */
@Override
public Future<?> doSnapshotWork()
{
    return m_snapshotter.doSnapshotWork(m_sysprocContext, false);
}
/** Begin snapshotting into the given pre-created data targets. */
@Override
public void startSnapshotWithTargets(Collection<SnapshotDataTarget> targets)
{
    m_snapshotter.startSnapshotWithTargets(targets, System.currentTimeMillis());
}
/**
 * Transition from kStateRejoining to live-rejoin replay after the snapshot
 * data has been received. Restores export sequence numbers per table and DR
 * sequence numbers, then arms the replay-completion action.
 *
 * @param replayComplete action to run when task-log replay finishes; required
 * @param exportSequenceNumbers per-table, per-partition export ack offsets
 * @param drSequenceNumbers per-partition DR sequence numbers (may be null)
 * @param requireExistingSequenceNumbers crash if expected numbers are missing
 */
@Override
public void setRejoinComplete(
        JoinProducerBase.JoinCompletionAction replayComplete,
        Map<String, Map<Integer, Pair<Long, Long>>> exportSequenceNumbers,
        Map<Integer, Long> drSequenceNumbers,
        boolean requireExistingSequenceNumbers) {
    // transition from kStateRejoining to live rejoin replay.
    // pass through this transition in all cases; if not doing
    // live rejoin, will transfer to kStateRunning as usual
    // as the rejoin task log will be empty.
    assert(m_rejoinState == kStateRejoining);

    if (replayComplete == null) {
        throw new RuntimeException("Null Replay Complete Action.");
    }

    for (Map.Entry<String, Map<Integer, Pair<Long,Long>>> tableEntry : exportSequenceNumbers.entrySet()) {
        final Table catalogTable = m_context.tables.get(tableEntry.getKey());
        if (catalogTable == null) {
            VoltDB.crashLocalVoltDB(
                    "Unable to find catalog entry for table named " + tableEntry.getKey(),
                    true,
                    null);
        }
        Pair<Long,Long> sequenceNumbers = tableEntry.getValue().get(m_partitionId);

        if (sequenceNumbers == null) {
            if (requireExistingSequenceNumbers) {
                VoltDB.crashLocalVoltDB(
                        "Could not find export sequence numbers for partition " +
                                m_partitionId + " table " +
                                tableEntry.getKey() + " have " + exportSequenceNumbers, false, null);
            } else {
                // No prior export state for this table/partition: start at zero.
                sequenceNumbers = Pair.of(0L,0L);
            }
        }

        exportAction(
                true,
                sequenceNumbers.getFirst().longValue(),
                sequenceNumbers.getSecond(),
                m_partitionId,
                catalogTable.getSignature());
    }

    if (drSequenceNumbers != null) {
        Long partitionDRSequenceNumber = drSequenceNumbers.get(m_partitionId);
        Long mpDRSequenceNumber = drSequenceNumbers.get(MpInitiator.MP_INIT_PID);
        setDRSequenceNumbers(partitionDRSequenceNumber, mpDRSequenceNumber);
    } else if (requireExistingSequenceNumbers) {
        VoltDB.crashLocalVoltDB("Could not find DR sequence number for partition " + m_partitionId);
    }

    m_rejoinState = kStateReplayingRejoin;
    m_replayCompletionAction = replayComplete;
}
/** Transition out of rejoin replay to the normal running state. */
private void setReplayRejoinComplete() {
    // transition out of rejoin replay to normal running state.
    assert(m_rejoinState == kStateReplayingRejoin);
    m_replayCompletionAction.run();
    m_rejoinState = kStateRunning;
}
/**
 * Execute a batch of plan fragments in the EE on behalf of the current txn.
 * Read-only batches pass Long.MAX_VALUE (no undo tracking); writes allocate
 * a fresh undo token via getNextUndoTokenBroken().
 */
@Override
public VoltTable[] executePlanFragments(int numFragmentIds,
                                        long[] planFragmentIds,
                                        long[] inputDepIds,
                                        Object[] parameterSets,
                                        String[] sqlTexts,
                                        long txnId,
                                        long spHandle,
                                        long uniqueId,
                                        boolean readOnly)
        throws EEException
{
    return m_ee.executePlanFragments(
            numFragmentIds,
            planFragmentIds,
            inputDepIds,
            parameterSets,
            sqlTexts,
            txnId,
            spHandle,
            m_lastCommittedSpHandle,
            uniqueId,
            readOnly ? Long.MAX_VALUE : getNextUndoTokenBroken());
}
/** Look up a loaded procedure runner by name; null semantics follow getProcByName. */
@Override
public ProcedureRunner getProcedureRunner(String procedureName) {
    return m_loadedProcedures.getProcByName(procedureName);
}
/**
 * Update the catalog. If we're the MPI, don't bother with the EE.
 *
 * Swaps in the new catalog context, refreshes the query timeout, reloads
 * procedures, and — for real sites — optionally blocks on an in-progress
 * snapshot (schema changes need snapshot isolation), quiesces export, and
 * pushes the diff into the EE.
 *
 * @param diffCmds catalog diff commands to apply in the EE
 * @param context the new catalog context
 * @param csp planner for catalog-specific ad hoc planning
 * @param requiresSnapshotIsolation true for schema changes that must wait
 *        for any locally running EE snapshot to finish first
 * @param isMPI true when called on the MPI site (skip all EE work)
 * @return always true
 */
public boolean updateCatalog(String diffCmds, CatalogContext context, CatalogSpecificPlanner csp,
        boolean requiresSnapshotIsolation, boolean isMPI)
{
    m_context = context;
    m_ee.setTimeoutLatency(m_context.cluster.getDeployment().get("deployment").
            getSystemsettings().get("systemsettings").getQuerytimeout());
    m_loadedProcedures.loadProcedures(m_context, m_backend, csp);

    if (isMPI) {
        // the rest of the work applies to sites with real EEs
        return true;
    }

    // if a snapshot is in process, wait for it to finish
    // don't bother if this isn't a schema change
    if (requiresSnapshotIsolation && m_snapshotter.isEESnapshotting()) {
        hostLog.info(String.format("Site %d performing schema change operation must block until snapshot is locally complete.",
                CoreUtils.getSiteIdFromHSId(m_siteId)));
        try {
            m_snapshotter.completeSnapshotWork(m_sysprocContext);
            hostLog.info(String.format("Site %d locally finished snapshot. Will update catalog now.",
                    CoreUtils.getSiteIdFromHSId(m_siteId)));
        }
        catch (InterruptedException e) {
            VoltDB.crashLocalVoltDB("Unexpected Interrupted Exception while finishing a snapshot for a catalog update.", true, e);
        }
    }

    //Necessary to quiesce before updating the catalog
    //so export data for the old generation is pushed to Java.
    m_ee.quiesce(m_lastCommittedSpHandle);
    m_ee.updateCatalog(m_context.m_uniqueId, diffCmds);

    return true;
}
/**
 * Seed the initiator mailbox with the max txn ids seen per partition during
 * restore. Exactly one SP txn id for this partition and (unless skipped)
 * exactly one MP txn id are expected; duplicates or a missing MP id crash.
 *
 * @param perPartitionTxnIds txn ids captured at snapshot time
 * @param skipMultiPart      true to ignore the MP txn id entirely
 */
@Override
public void setPerPartitionTxnIds(long[] perPartitionTxnIds, boolean skipMultiPart) {
    boolean foundMultipartTxnId = skipMultiPart;
    boolean foundSinglepartTxnId = false;
    for (long txnId : perPartitionTxnIds) {
        if (TxnEgo.getPartitionId(txnId) == m_partitionId) {
            if (foundSinglepartTxnId) {
                VoltDB.crashLocalVoltDB(
                        "Found multiple transactions ids during restore for a partition", false, null);
            }
            foundSinglepartTxnId = true;
            m_initiatorMailbox.setMaxLastSeenTxnId(txnId);
        }
        if (!skipMultiPart && TxnEgo.getPartitionId(txnId) == MpInitiator.MP_INIT_PID) {
            if (foundMultipartTxnId) {
                VoltDB.crashLocalVoltDB(
                        "Found multiple transactions ids during restore for a multipart txnid", false, null);
            }
            foundMultipartTxnId = true;
            m_initiatorMailbox.setMaxLastSeenMultipartTxnId(txnId);
        }
    }
    if (!foundMultipartTxnId) {
        VoltDB.crashLocalVoltDB("Didn't find a multipart txnid on restore", false, null);
    }
}
/** Update the cached cluster partition count (e.g. after elastic join). */
public void setNumberOfPartitions(int partitionCount)
{
    m_numberOfPartitions = partitionCount;
}
/** @return the site-local hashinator snapshot */
@Override
public TheHashinator getCurrentHashinator() {
    return m_hashinator;
}
/** Replace the site-local hashinator and push its config into the EE. */
@Override
public void updateHashinator(TheHashinator hashinator)
{
    Preconditions.checkNotNull(hashinator);
    m_hashinator = hashinator;
    m_ee.updateHashinator(hashinator.pGetCurrentConfig());
}
/**
 * For the specified list of table ids, return the number of mispartitioned rows using
 * the provided hashinator and hashinator config.
 *
 * Parameter buffer layout: table count (int), table ids (longs),
 * hashinator type (int), raw hashinator config bytes. The reply contains
 * one long per table id, in the same order.
 */
@Override
public long[] validatePartitioning(long[] tableIds, int hashinatorType, byte[] hashinatorConfig) {
    ByteBuffer paramBuffer = m_ee.getParamBufferForExecuteTask(4 + (8 * tableIds.length) + 4 + 4 + hashinatorConfig.length);
    paramBuffer.putInt(tableIds.length);
    for (long tableId : tableIds) {
        paramBuffer.putLong(tableId);
    }
    paramBuffer.putInt(hashinatorType);
    paramBuffer.put(hashinatorConfig);

    ByteBuffer resultBuffer = ByteBuffer.wrap(m_ee.executeTask( TaskType.VALIDATE_PARTITIONING, paramBuffer));
    long mispartitionedRows[] = new long[tableIds.length];
    for (int ii = 0; ii < tableIds.length; ii++) {
        mispartitionedRows[ii] = resultBuffer.getLong();
    }
    return mispartitionedRows;
}
/** Tell the EE which statement batch index is executing (for logging/tracing). */
@Override
public void setBatch(int batchIndex) {
    m_ee.setBatch(batchIndex);
}
/** Tell the EE which procedure is executing (for logging/tracing). */
@Override
public void setProcedureName(String procedureName) {
    m_ee.setProcedureName(procedureName);
}
/** Forward a snapshot nonce notification to the initiator mailbox. */
@Override
public void notifyOfSnapshotNonce(String nonce, long snapshotSpHandle) {
    m_initiatorMailbox.notifyOfSnapshotNonce(nonce, snapshotSpHandle);
}
/**
 * Apply a DR binary log in the EE. Parameter buffer layout: log length
 * (int) followed by the raw log bytes. Allocates a fresh undo token so the
 * application can be rolled back.
 */
@Override
public void applyBinaryLog(long txnId, long spHandle,
        long uniqueId, byte log[]) throws EEException {
    ByteBuffer paramBuffer = m_ee.getParamBufferForExecuteTask(4 + log.length);
    paramBuffer.putInt(log.length);
    paramBuffer.put(log);
    m_ee.applyBinaryLog(paramBuffer, txnId, spHandle, m_lastCommittedSpHandle, uniqueId,
            getNextUndoToken(m_currentTxnId));
}
}
|
package com.vip.saturn.job.shell;
import com.alibaba.fastjson.JSON;
import com.vip.saturn.job.SaturnJobReturn;
import com.vip.saturn.job.SaturnSystemErrorGroup;
import com.vip.saturn.job.SaturnSystemReturnCode;
import com.vip.saturn.job.basic.AbstractSaturnJob;
import com.vip.saturn.job.basic.SaturnConstant;
import com.vip.saturn.job.basic.SaturnExecutionContext;
import com.vip.saturn.job.utils.LogUtils;
import com.vip.saturn.job.utils.ScriptPidUtils;
import com.vip.saturn.job.utils.SystemEnvProperties;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
public class ScriptJobRunner {
// SLF4J logger; made final per convention for static loggers.
private static final Logger log = LoggerFactory.getLogger(ScriptJobRunner.class);

// Prepended to every command so the shell picks up the login environment.
// NOTE(review): field name has a typo ("COMAND"); kept to avoid touching other code.
private static final String PREFIX_COMAND = " source /etc/profile; ";

// Environment variables exported before the job command runs.
private Map<String, String> envMap = new HashMap<>();

private AbstractSaturnJob job;            // owning job instance
private Integer item;                     // shard item number being executed
private String itemValue;                 // shell command configured for this item
private SaturnExecutionContext saturnExecutionContext;
private String jobName;                   // cached from job for logging
private SaturnExecuteWatchdog watchdog;   // lazily created in getWatchdog()
private boolean businessReturned = false; // true once a SaturnJobReturn file was read
private File saturnOutputFile;            // file the business script writes its result to
/**
 * @param envMap extra environment variables for the script; copied, may be null
 * @param job    owning job (may be null in tests; jobName stays null then)
 * @param item   shard item number
 * @param itemValue shell command to run for this item
 * @param saturnExecutionContext execution context (timeout, job log sink)
 */
public ScriptJobRunner(Map<String, String> envMap, AbstractSaturnJob job, Integer item, String itemValue,
        SaturnExecutionContext saturnExecutionContext) {
    if (envMap != null) {
        this.envMap.putAll(envMap);
    }
    this.job = job;
    this.item = item;
    this.itemValue = itemValue;
    this.saturnExecutionContext = saturnExecutionContext;
    if (job != null) {
        this.jobName = job.getJobName();
    }
}
/** @return true once a SaturnJobReturn has been read from the output file */
public boolean isBusinessReturned() {
    return businessReturned;
}
/**
 * Create the file the business script writes its SaturnJobReturn into, at
 * the path given by the VIP_SATURN_OUTPUT_PATH env entry. No-op when that
 * env entry is absent; parent directories are created as needed.
 *
 * @throws IOException if the directory or file cannot be created
 */
private void createSaturnJobReturnFile() throws IOException {
    if (envMap.containsKey(SystemEnvProperties.NAME_VIP_SATURN_OUTPUT_PATH)) {
        String saturnOutputPath = envMap.get(SystemEnvProperties.NAME_VIP_SATURN_OUTPUT_PATH);
        saturnOutputFile = new File(saturnOutputPath);
        if (!saturnOutputFile.exists()) {
            FileUtils.forceMkdir(saturnOutputFile.getParentFile());
            if (!saturnOutputFile.createNewFile()) {
                // Raced with another creator; harmless, just log it.
                LogUtils.warn(log, jobName, "file {} already exsits.", saturnOutputPath);
            }
        }
    }
}
/**
 * Build the shell command line: a series of "export K=V;" statements for
 * every configured env entry, then the profile-sourcing prefix, then the
 * item command with ${VAR} placeholders substituted from the given env.
 *
 * @param env process environment used for placeholder substitution
 * @return a /bin/sh -c command line (argument left unquoted)
 */
private CommandLine createCommandLine(Map<String, String> env) {
    // envMap is initialized at declaration and never reassigned, so it is
    // safe to iterate directly; an empty map simply contributes nothing.
    StringBuilder exports = new StringBuilder();
    for (Entry<String, String> entry : envMap.entrySet()) {
        exports.append("export ").append(entry.getKey()).append('=')
                .append(entry.getValue()).append(';');
    }

    String execParameter = exports.toString() + PREFIX_COMAND
            + ScriptPidUtils.filterEnvInCmdStr(env, itemValue);

    CommandLine cmdLine = new CommandLine("/bin/sh");
    cmdLine.addArguments(new String[]{"-c", execParameter}, false);
    return cmdLine;
}
/**
 * Parse the SaturnJobReturn JSON the business script wrote to the output
 * file, if any. Sets {@link #businessReturned} on success. A parse/read
 * failure yields a USER_FAIL return rather than propagating.
 *
 * @return the parsed return, a failure return on error, or null when the
 *         file is absent or empty
 */
private SaturnJobReturn readSaturnJobReturn() {
    SaturnJobReturn tmp = null;
    if (saturnOutputFile != null && saturnOutputFile.exists()) {
        try {
            // NOTE(review): deprecated overload — reads with the platform
            // default charset; confirm scripts write in that encoding.
            String fileContents = FileUtils.readFileToString(saturnOutputFile);
            if (fileContents != null && !fileContents.trim().isEmpty()) {
                tmp = JSON.parseObject(fileContents.trim(), SaturnJobReturn.class);
                businessReturned = true;
            }
        } catch (Throwable t) {
            String template = "%s - %s read SaturnJobReturn from %s error";
            LogUtils.error(log, jobName, String.format(template, jobName, item, saturnOutputFile.getAbsolutePath()),
                    t);
            tmp = new SaturnJobReturn(SaturnSystemReturnCode.USER_FAIL, "Exception: " + t,
                    SaturnSystemErrorGroup.FAIL);
        }
    }
    return tmp;
}
/**
 * Lazily create the process watchdog. With a positive configured timeout
 * the watchdog kills the process after that many seconds; otherwise an
 * effectively-infinite timeout of 5 years is used (timeout disabled).
 */
public synchronized SaturnExecuteWatchdog getWatchdog() {
    if (watchdog == null) {
        long timeoutSeconds = saturnExecutionContext.getTimetoutSeconds();
        String executorName = job.getExecutorName();
        if (timeoutSeconds > 0) {
            watchdog = new SaturnExecuteWatchdog(timeoutSeconds * 1000, jobName, item, itemValue, executorName);
            LogUtils.info(log, jobName, "Job {} enable timeout control : {} s ", jobName, timeoutSeconds);
        } else { // no timeout configured: use a ~5-year watchdog timeout
            watchdog = new SaturnExecuteWatchdog(5L * 365 * 24 * 3600 * 1000, jobName, item, itemValue,
                    executorName);
            if (log.isDebugEnabled()) {
                LogUtils.debug(log, jobName, "Job {} disable timeout control", jobName);
            }
        }
    }
    return watchdog;
}
/**
 * Run the shell job for this item: create the output file the script may
 * write its result into, execute the command (with timeout control), and
 * always clean up the output directory afterwards.
 *
 * @return the job's result; a SYSTEM_FAIL return on any internal error,
 *         never null, and always with a non-null prop map
 */
public SaturnJobReturn runJob() {
    SaturnJobReturn saturnJobReturn = null;
    long timeoutSeconds = saturnExecutionContext.getTimetoutSeconds();
    try {
        createSaturnJobReturnFile();
        saturnJobReturn = execute(timeoutSeconds);
    } catch (Throwable t) {
        String template = "%s - %s Exception";
        LogUtils.error(log, jobName, String.format(template, jobName, item), t);
        saturnJobReturn = new SaturnJobReturn(SaturnSystemReturnCode.SYSTEM_FAIL, "Exception: " + t,
                SaturnSystemErrorGroup.FAIL);
    } finally {
        // saturnOutputFile is null when the output-path env entry is absent
        // or file creation failed; guard against an NPE in the finally
        // block that would mask the real result/exception.
        if (saturnOutputFile != null) {
            FileUtils.deleteQuietly(saturnOutputFile.getParentFile());
        }
    }

    if (saturnJobReturn.getProp() == null) {
        saturnJobReturn.setProp(new HashMap<>());
    }
    return saturnJobReturn;
}
/**
 * Execute the shell command via commons-exec with a pump-stream handler and
 * the job watchdog, then read the SaturnJobReturn the script may have
 * written. Always reports the captured job log, closes the output stream,
 * stops the stream handler, and removes the pid file.
 *
 * @param timeoutSeconds configured timeout, used for the stream-handler stop timeout
 * @return the job result (from the return file, or synthesized from the exit value)
 */
private SaturnJobReturn execute(long timeoutSeconds) {
    SaturnJobReturn saturnJobReturn;
    ProcessOutputStream processOutputStream = new ProcessOutputStream(1);
    DefaultExecutor executor = new DefaultExecutor();
    PumpStreamHandler streamHandler = new PumpStreamHandler(processOutputStream);
    // Stop timeout for the pump threads (commons-exec addition).
    streamHandler.setStopTimeout(timeoutSeconds * 1000);
    executor.setExitValue(0);
    executor.setStreamHandler(streamHandler);
    executor.setWatchdog(getWatchdog());
    // filter env key in execParameter. like cd ${mypath} -> cd /root/my.
    Map<String, String> env = ScriptPidUtils.loadEnv();
    CommandLine commandLine = createCommandLine(env);

    try {
        long start = System.currentTimeMillis();
        LogUtils.info(log, jobName, "Begin executing {}-{} {}", jobName, item, commandLine);
        int exitValue = executor.execute(commandLine, env);
        long end = System.currentTimeMillis();
        LogUtils.info(log, jobName, "Finish executing {}-{} {}, the exit value is {}, cost={}ms", jobName, item,
                commandLine, exitValue, (end - start));

        SaturnJobReturn tmp = readSaturnJobReturn();
        if (tmp == null) {
            // Script produced no return file: synthesize one from the exit value.
            tmp = new SaturnJobReturn("the exit value is " + exitValue);
        }
        saturnJobReturn = tmp;
    } catch (Exception e) {
        saturnJobReturn = handleException(timeoutSeconds, e);
    } finally {
        try {
            // Report the captured job log before closing the stream.
            handleJobLog(processOutputStream.getJobLog());
            processOutputStream.close();
        } catch (Exception ex) {
            String template = "%s-%s Error at closing output stream. Should not be concern: ";
            LogUtils.error(log, jobName, String.format(template, jobName, item), ex);
        }

        stopStreamHandler(streamHandler);
        ScriptPidUtils.removePidFile(job.getExecutorName(), jobName, "" + item, watchdog.getPid());
    }
    return saturnJobReturn;
}
/**
 * Publishes the captured process output as this item's job log,
 * truncating it to SaturnConstant.MAX_JOB_LOG_DATA_LENGTH first.
 */
private void handleJobLog(String jobLog) {
    // Cap the log size (MAX_JOB_LOG_DATA_LENGTH — presumably ~1M, see SaturnConstant).
    if (jobLog != null && jobLog.length() > SaturnConstant.MAX_JOB_LOG_DATA_LENGTH) {
        LogUtils.info(log, jobName,
                "As the job log exceed max length, only the previous {} characters will be reported",
                SaturnConstant.MAX_JOB_LOG_DATA_LENGTH);
        jobLog = jobLog.substring(0, SaturnConstant.MAX_JOB_LOG_DATA_LENGTH);
    }
    saturnExecutionContext.putJobLog(item, jobLog);
    // Echo to stdout so the shell job's log also lands in saturn-job-executor.log.
    System.out.println("[" + jobName + "] msg=" + jobName + "-" + item + ":" + jobLog);// NOSONAR
    LogUtils.info(log, jobName, "{}-{}: {}", jobName, item, jobLog);
}
/**
 * Best-effort shutdown of the pump stream handler; a failure here must not
 * affect the job result, so it is only logged at debug level.
 */
private void stopStreamHandler(PumpStreamHandler streamHandler) {
    try {
        streamHandler.stop();
    } catch (IOException ioe) {
        String msg = String.format("%s-%s Error at closing log stream. Should not be concern: ", jobName, item);
        LogUtils.debug(log, jobName, msg, ioe);
    }
}
/**
 * Maps an execution exception to a SaturnJobReturn, distinguishing three
 * cases via the watchdog: timeout kill, manual force-stop, and a genuine
 * process/user failure.
 */
private SaturnJobReturn handleException(long timeoutSeconds, Exception e) {
    SaturnJobReturn saturnJobReturn;
    String errMsg = e.toString();
    if (watchdog.isTimeout()) {
        saturnJobReturn = new SaturnJobReturn(SaturnSystemReturnCode.SYSTEM_FAIL,
                String.format("execute job timeout(%sms), %s", timeoutSeconds * 1000, errMsg),
                SaturnSystemErrorGroup.TIMEOUT);
        LogUtils.error(log, jobName, "{}-{} timeout, {}", jobName, item, errMsg);
        return saturnJobReturn;
    }
    if (watchdog.isForceStop()) {
        saturnJobReturn = new SaturnJobReturn(SaturnSystemReturnCode.SYSTEM_FAIL,
                "the job was forced to stop, " + errMsg, SaturnSystemErrorGroup.FAIL);
        LogUtils.error(log, jobName, "{}-{} force stopped, {}", jobName, item, errMsg);
        return saturnJobReturn;
    }
    saturnJobReturn = new SaturnJobReturn(SaturnSystemReturnCode.USER_FAIL, "Exception: " + errMsg,
            SaturnSystemErrorGroup.FAIL);
    // Bug fix: the pattern had only two placeholders ("{}-{} Exception") while
    // three arguments were supplied, so errMsg was silently dropped from the log.
    LogUtils.error(log, jobName, "{}-{} Exception: {}", jobName, item, errMsg);
    return saturnJobReturn;
}
}
|
package com.exedio.cope;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.Random;
import oracle.jdbc.OracleStatement;
import oracle.jdbc.driver.OracleDriver;
import bak.pcj.IntIterator;
import bak.pcj.list.IntList;
import bak.pcj.map.IntKeyChainedHashMap;
final class OracleDatabase
extends Database
implements
DatabaseColumnTypesDefinable,
DatabaseTimestampCapable
{
// Fail fast at class-load time if the oracle JDBC driver is not on the classpath.
static
{
	try
	{
		Class.forName(OracleDriver.class.getName());
	}
	catch(ClassNotFoundException e)
	{
		throw new NestingRuntimeException(e);
	}
}
private boolean checkedPlanTable = false;
protected OracleDatabase(final Properties properties)
{
	// Upper-cases the user name — presumably because oracle reports schema
	// names in upper case; confirm against Database's schema handling.
	super(properties, properties.getDatabaseUser().toUpperCase());
}
String getIntegerType(final int precision)
{
	// Oracle has no sized integer type; integers map onto NUMBER(p).
	final String result = "NUMBER(" + precision + ")";
	return result;
}
String getDoubleType(final int precision)
{
	// Doubles become NUMBER(p,8): a fixed scale of 8 fractional digits.
	final String result = "NUMBER(" + precision + ",8)";
	return result;
}
String getStringType(final int maxLength)
{
	// Integer.MAX_VALUE marks "unbounded"; cap those at 2000 characters.
	final int length = (maxLength == Integer.MAX_VALUE) ? 2000 : maxLength;
	return "VARCHAR2(" + length + ")";
}
public String getDateTimestampType()
{
	// Millisecond precision (TIMESTAMP(3)) is enough for java.util.Date values.
	final String type = "TIMESTAMP(3)";
	return type;
}
/**
 * Reverse-maps JDBC column metadata onto the DDL type names used by this
 * dialect; returns null for unrecognized JDBC types.
 */
protected String getColumnType(final int dataType, final ResultSet resultSet) throws SQLException
{
	final int columnSize = resultSet.getInt("COLUMN_SIZE");
	if(dataType==Types.DECIMAL)
	{
		final int decimalDigits = resultSet.getInt("DECIMAL_DIGITS");
		return (decimalDigits>0)
			? "NUMBER(" + columnSize + ',' + decimalDigits + ')'
			: "NUMBER(" + columnSize + ')';
	}
	if(dataType==Types.OTHER)
		return "TIMESTAMP(3)"; // oracle reports TIMESTAMP columns as OTHER
	if(dataType==Types.VARCHAR)
		return "VARCHAR2(" + columnSize + ')';
	return null;
}
/**
 * Parses the constraint name out of an oracle error message shaped as
 * "&lt;start&gt;OWNER.NAME&lt;end&gt;"; returns null when either the vendor
 * code or the message shape does not match.
 */
private String extractConstraintName(final SQLException e, final int vendorCode, final String start, final String end)
{
	if(e.getErrorCode()!=vendorCode)
		return null;
	final String message = e.getMessage();
	if(!message.startsWith(start) || !message.endsWith(end))
		return null;
	final int dot = message.indexOf('.', start.length());
	return message.substring(dot+1, message.length()-end.length());
}
/** ORA-00001 (vendor code 1) carries the violated unique constraint as "(OWNER.NAME)". */
protected String extractUniqueConstraintName(final SQLException e)
{
	final String prefix = "ORA-00001: unique constraint (";
	final String suffix = ") violated\n";
	return extractConstraintName(e, 1, prefix, suffix);
}
/** ORA-02292 (vendor code 2292) carries the violated foreign-key constraint as "(OWNER.NAME)". */
protected String extractIntegrityConstraintName(final SQLException e)
{
	final String prefix = "ORA-02292: integrity constraint (";
	final String suffix = ") violated - child record found\n";
	return extractConstraintName(e, 2292, prefix, suffix);
}
/**
 * Pre-declares the JDBC type of each result column on the oracle-specific
 * statement, one call per expected column in order. NOTE(review): presumably
 * an oracle fetch optimisation — confirm against the oracle JDBC docs.
 */
public void defineColumnTypes(final IntList columnTypes, final java.sql.Statement statement)
	throws SQLException
{
	//System.out.println("defineColumnTypes: "+columnTypes);
	final OracleStatement s = (OracleStatement)statement;
	int columnIndex = 1;
	for(IntIterator i = columnTypes.iterator(); i.hasNext(); columnIndex++)
	{
		final int columnType = i.next();
		s.defineColumnType(columnIndex, columnType);
	}
}
/**
 * Adds oracle-specific schema information to the report: existing tables
 * (with their last-analyzed timestamp) and all constraints.
 */
void fillReport(final Report report)
{
	super.fillReport(report);
	// Pass 1: register every existing table and its LAST_ANALYZED date.
	{
		final Statement bf = createStatement();
		bf.append("select TABLE_NAME, LAST_ANALYZED from user_tables").
			defineColumnString().
			defineColumnTimestamp();
		executeSQLQuery(bf, new ResultSetHandler()
		{
			public void run(final ResultSet resultSet) throws SQLException
			{
				while(resultSet.next())
				{
					final String tableName = resultSet.getString(1);
					final Date lastAnalyzed = (Date)resultSet.getObject(2);
					final ReportTable table = report.notifyExistentTable(tableName);
					table.setLastAnalyzed(lastAnalyzed);
					//System.out.println("EXISTS:"+tableName);
				}
			}
		}, false);
	}
	// Pass 2: register every constraint, mapped by ReportConstraintHandler.
	{
		final Statement bf = createStatement();
		bf.append(
			"select " +
			"TABLE_NAME," +
			"CONSTRAINT_NAME," +
			"CONSTRAINT_TYPE," +
			"SEARCH_CONDITION " +
			"from user_constraints order by table_name").
			defineColumnString().
			defineColumnString().
			defineColumnString().
			defineColumnString();
		executeSQLQuery(bf, new ReportConstraintHandler(report), false);
	}
}
/**
 * Maps rows of USER_CONSTRAINTS onto report constraints. Oracle's
 * CONSTRAINT_TYPE codes: C = check, P = primary key, R = referential
 * (foreign key), U = unique; anything else is rejected.
 */
private static class ReportConstraintHandler implements ResultSetHandler
{
	private final Report report;
	ReportConstraintHandler(final Report report)
	{
		this.report = report;
	}
	public void run(final ResultSet resultSet) throws SQLException
	{
		while(resultSet.next())
		{
			final String tableName = resultSet.getString(1);
			final String constraintName = resultSet.getString(2);
			final String constraintType = resultSet.getString(3);
			final ReportTable table = report.notifyExistentTable(tableName);
			//System.out.println("tableName:"+tableName+" constraintName:"+constraintName+" constraintType:>"+constraintType+"<");
			// NOTE(review): the local is assigned but never read afterwards;
			// only the notifyExistent* side effect on the table matters here.
			final ReportConstraint constraint;
			if("C".equals(constraintType))
			{
				// SEARCH_CONDITION is only meaningful for check constraints.
				final String searchCondition = resultSet.getString(4);
				//System.out.println("searchCondition:>"+searchCondition+"<");
				constraint = table.notifyExistentCheckConstraint(constraintName, searchCondition);
			}
			else
			{
				final int type;
				if("P".equals(constraintType))
					type = ReportConstraint.TYPE_PRIMARY_KEY;
				else if("R".equals(constraintType))
					type = ReportConstraint.TYPE_FOREIGN_KEY;
				else if("U".equals(constraintType))
					type = ReportConstraint.TYPE_UNIQUE;
				else
					throw new RuntimeException(constraintType+'-'+constraintName);
				constraint = table.notifyExistentConstraint(constraintName, type);
			}
			//System.out.println("EXISTS:"+tableName);
		}
	}
}
/**
 * Builds "alter table T rename column OLD to NEW". The columnType parameter
 * is part of the dialect interface but unused by oracle's rename syntax.
 */
Statement getRenameColumnStatement(final String tableName,
		final String oldColumnName, final String newColumnName, final String columnType)
{
	final Statement stmt = createStatement();
	stmt.append("alter table ");
	stmt.append(tableName);
	stmt.append(" rename column ");
	stmt.append(oldColumnName);
	stmt.append(" to ");
	stmt.append(newColumnName);
	return stmt;
}
/** Builds "alter table T add (COLUMN TYPE)". */
Statement getCreateColumnStatement(final String tableName, final String columnName, final String columnType)
{
	final Statement stmt = createStatement();
	stmt.append("alter table ");
	stmt.append(tableName);
	stmt.append(" add (");
	stmt.append(columnName);
	stmt.append(' ');
	stmt.append(columnType);
	stmt.append(')');
	return stmt;
}
/** Builds "alter table T modify COLUMN NEWTYPE". */
Statement getModifyColumnStatement(final String tableName, final String columnName, final String newColumnType)
{
	final Statement stmt = createStatement();
	stmt.append("alter table ");
	stmt.append(tableName);
	stmt.append(" modify ");
	stmt.append(columnName);
	stmt.append(' ');
	stmt.append(newColumnType);
	return stmt;
}
/**
 * Augments the generic statement info with oracle's EXPLAIN PLAN output,
 * when available (makePlanInfo returns null for DDL statements).
 */
protected StatementInfo makeStatementInfo(final Statement statement, final Connection connection)
{
	final StatementInfo result = super.makeStatementInfo(statement, connection);
	final StatementInfo planInfo = makePlanInfo(statement, connection);
	if(planInfo!=null)
		result.addChild(planInfo);
	return result;
}
// Random source for per-run STATEMENT_ID values; guarded by synchronized in makePlanInfo.
private static final Random statementIDCounter = new Random();
// Names of oracle's standard plan table and the columns handled specially below.
private static final String PLAN_TABLE = "PLAN_TABLE";
private static final String STATEMENT_ID = "STATEMENT_ID";
private static final String OPERATION = "OPERATION";
private static final String OPTIONS = "OPTIONS";
private static final String OBJECT_NAME = "OBJECT_NAME";
private static final String OBJECT_INSTANCE = "OBJECT_INSTANCE";
private static final String OBJECT_TYPE = "OBJECT_TYPE";
private static final String ID = "ID";
private static final String PARENT_ID = "PARENT_ID";
private static final String STATEMENT_ID_PREFIX = "cope";
// Plan-table columns that makePlanInfo renders specially (or ignores);
// all other non-null columns are appended generically as "name=value".
private static final HashSet skippedColumnNames = new HashSet(Arrays.asList(new String[]{
	STATEMENT_ID,
	OPERATION,
	OPTIONS,
	"TIMESTAMP",
	"OBJECT_OWNER",
	OBJECT_NAME,
	OBJECT_INSTANCE,
	OBJECT_TYPE,
	ID,
	PARENT_ID,
	"POSITION",
}));
/**
 * Runs EXPLAIN PLAN for the given statement and converts the PLAN_TABLE rows
 * into a StatementInfo tree (children linked via PARENT_ID). Returns null for
 * "alter table" statements. On first use, verifies that PLAN_TABLE exists and
 * creates it if the probe query fails.
 */
private StatementInfo makePlanInfo(final Statement statement, final Connection connection)
{
	// One-time lazy check/creation of PLAN_TABLE.
	if(!checkedPlanTable)
	{
		final Statement check = createStatement();
		check.append("SELECT * FROM "+PLAN_TABLE);
		try
		{
			executeSQLQuery(check,
				new ResultSetHandler()
				{
					public void run(final ResultSet resultSet) throws SQLException
					{
					}
				},
				false);
		}
		catch(NestingRuntimeException e)
		{
			// Probe failed — assume the table is missing and create it with
			// oracle's documented PLAN_TABLE layout.
			System.err.println("cope creates oracle plan_table");
			final Statement create = createStatement();
			create.append(
				"CREATE TABLE "+PLAN_TABLE+" (" +
				STATEMENT_ID+" VARCHAR2(30), " +
				"timestamp DATE, " +
				"remarks VARCHAR2(80), " +
				OPERATION+" VARCHAR2(30), " +
				OPTIONS+" VARCHAR2(30), " +
				"object_node VARCHAR2(128), " +
				"object_owner VARCHAR2(30), " +
				OBJECT_NAME+" VARCHAR2(30), " +
				OBJECT_INSTANCE+" NUMERIC, " +
				OBJECT_TYPE+" VARCHAR2(30), " +
				"optimizer VARCHAR2(255), " +
				"search_columns NUMERIC, " +
				ID+" NUMERIC, " +
				PARENT_ID+" NUMERIC, " +
				"position NUMERIC, " +
				"cost NUMERIC, " +
				"cardinality NUMERIC, " +
				"bytes NUMERIC, " +
				"other_tag VARCHAR2(255), " +
				"other LONG)");
			try
			{
				executeSQLUpdate(create, 0);
			}
			catch(ConstraintViolationException cve)
			{
				throw new NestingRuntimeException(cve);
			}
		}
		checkedPlanTable = true;
	}
	final String statementText = statement.getText();
	// DDL cannot be explained.
	if(statementText.startsWith("alter table "))
		return null;
	// Unique id so concurrent explains do not read each other's rows.
	final int statementID;
	synchronized(statementIDCounter)
	{
		statementID = Math.abs(statementIDCounter.nextInt());
	}
	StatementInfo root = null;
	// Step 1: run EXPLAIN PLAN, tagging the rows with our statement id.
	{
		final Statement explainStatement = createStatement();
		explainStatement.
			append("explain plan set "+STATEMENT_ID+"='"+STATEMENT_ID_PREFIX).
			append(statementID).
			append("' for ").
			append(statementText);
		java.sql.Statement sqlExplainStatement = null;
		try
		{
			sqlExplainStatement = connection.createStatement();
			sqlExplainStatement.executeUpdate(explainStatement.getText());
		}
		catch(SQLException e)
		{
			throw new NestingRuntimeException(e, explainStatement.toString());
		}
		finally
		{
			if(sqlExplainStatement!=null)
			{
				try
				{
					sqlExplainStatement.close();
				}
				catch(SQLException e)
				{
					// exception is already thrown
				}
			}
		}
	}
	// Step 2: read back the tagged plan rows, ordered by ID so parents are
	// guaranteed to be seen before their children.
	{
		final Statement fetchStatement = createStatement();
		fetchStatement.
			append(
				"select * from "+PLAN_TABLE+' ' +
				"where "+STATEMENT_ID+"='"+STATEMENT_ID_PREFIX).
			append(statementID).
			append("' order by "+ID);
		java.sql.Statement sqlFetchStatement = null;
		ResultSet sqlFetchResultSet = null;
		try
		{
			sqlFetchStatement = connection.createStatement();
			defineColumnTypes(fetchStatement.columnTypes, sqlFetchStatement);
			sqlFetchResultSet = sqlFetchStatement.executeQuery(fetchStatement.getText());
			final IntKeyChainedHashMap infos = new IntKeyChainedHashMap();
			final ResultSetMetaData metaData = sqlFetchResultSet.getMetaData();
			final int columnCount = metaData.getColumnCount();
			while(sqlFetchResultSet.next())
			{
				final String operation = sqlFetchResultSet.getString(OPERATION);
				final String options = sqlFetchResultSet.getString(OPTIONS);
				final String objectName = sqlFetchResultSet.getString(OBJECT_NAME);
				final int objectInstance = sqlFetchResultSet.getInt(OBJECT_INSTANCE);
				final String objectType = sqlFetchResultSet.getString(OBJECT_TYPE);
				final int id = sqlFetchResultSet.getInt(ID);
				final Number parentID = (Number)sqlFetchResultSet.getObject(PARENT_ID);
				// Render the well-known columns in a compact fixed format ...
				final StringBuffer bf = new StringBuffer(operation);
				if(options!=null)
					bf.append(" (").append(options).append(')');
				if(objectName!=null)
					bf.append(" on ").append(objectName);
				if(objectInstance!=0)
					bf.append('[').append(objectInstance).append(']');
				if(objectType!=null)
					bf.append('[').append(objectType).append(']');
				// ... and every remaining non-null column generically as name=value.
				for(int i = 1; i<=columnCount; i++)
				{
					final String columnName = metaData.getColumnName(i);
					if(!skippedColumnNames.contains(columnName))
					{
						final Object value = sqlFetchResultSet.getObject(i);
						if(value!=null)
						{
							bf.append(' ').
								append(columnName.toLowerCase()).
								append('=').
								append(value.toString());
						}
					}
				}
				final StatementInfo info = new StatementInfo(bf.toString());
				// Link into the tree: exactly one row (the plan root) has no parent.
				if(parentID==null)
				{
					if(root!=null)
						throw new RuntimeException(String.valueOf(id));
					root = info;
				}
				else
				{
					final StatementInfo parent = (StatementInfo)infos.get(parentID.intValue());
					if(parent==null)
						throw new RuntimeException();
					parent.addChild(info);
				}
				infos.put(id, info);
			}
		}
		catch(SQLException e)
		{
			throw new NestingRuntimeException(e, fetchStatement.toString());
		}
		finally
		{
			if(sqlFetchResultSet!=null)
			{
				try
				{
					sqlFetchResultSet.close();
				}
				catch(SQLException e)
				{
					// exception is already thrown
				}
			}
			if(sqlFetchStatement!=null)
			{
				try
				{
					sqlFetchStatement.close();
				}
				catch(SQLException e)
				{
					// exception is already thrown
				}
			}
		}
	}
	if(root==null)
		throw new RuntimeException();
	final StatementInfo result = new StatementInfo("execution plan statement_id = " + STATEMENT_ID_PREFIX + statementID);
	result.addChild(root);
	//System.out.println("
	//System.out.println(statement.getText());
	//root.print(System.out);
	//System.out.println("
	return result;
}
}
|
import java.io.BufferedWriter;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.List;
import java.time.Duration;
import java.time.format.DateTimeFormatter;
import java.time.LocalDateTime;
import java.time.temporal.ChronoField;
import java.text.SimpleDateFormat;
/**
* Calculates epoch summaries from an AX3 .CWA file.
* Class/application can be called from the command line as follows:
* java AxivityAx3Epochs inputFile.CWA
*/
public class AxivityAx3Epochs
{
/**
* Parse command line args, then call method to identify & write epochs.
*/
/**
 * Parses command line args ("name:value" pairs after the input file name),
 * then identifies & writes epochs. Exits with status 0 on bad usage or
 * after printing an error (historical behavior).
 */
public static void main(String[] args) {
    //variables to store default parameter options
    String accFile = "";
    String[] functionParameters = new String[0];
    String outputFile = "";
    Boolean verbose = true;
    int epochPeriod = 5;
    DateTimeFormatter timeFormat = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.S");
    double lowPassCut = 20;
    double highPassCut = 0.2;
    int sampleRate = 100;
    //create Filters necessary for later data processing
    LowpassFilter filter = new LowpassFilter(lowPassCut, sampleRate);
    //BandpassFilter filter = new BandpassFilter(highPassCut, lowPassCut, sampleRate);
    Boolean startEpochWholeMinute = false;
    Boolean startEpochWholeSecond = false;
    Boolean getStationaryBouts = false;
    double stationaryStd = 0.013;
    // Software calibration: out = intercept + raw*slope + (temp-meanTemp)*tempCoef, per axis.
    double[] swIntercept = new double[]{0.0, 0.0, 0.0};
    double[] swSlope = new double[]{1.0, 1.0, 1.0};
    double[] tempCoef = new double[]{0.0, 0.0, 0.0};
    double meanTemp = 0.0;
    int range = 8;
    if (args.length < 1) {
        String invalidInputMsg = "Invalid input, ";
        invalidInputMsg += "please enter at least 1 parameter, e.g.\n";
        invalidInputMsg += "java AxivityAx3Epochs inputFile.CWA";
        System.out.println(invalidInputMsg);
        System.exit(0);
    } else if (args.length == 1) {
        //singe parameter needs to be accFile
        accFile = args[0];
        outputFile = accFile.split("\\.")[0] + "Epoch.csv";
    } else {
        //load accFile, and also copy functionParameters (args[1:])
        accFile = args[0];
        outputFile = accFile.split("\\.")[0] + "Epoch.csv";
        functionParameters = Arrays.copyOfRange(args, 1, args.length);
        //update default values by looping through available user parameters
        for (String individualParam : functionParameters) {
            //individual_Parameters will look like "epoch_period:60"
            String funcName = individualParam.split(":")[0];
            String funcParam = individualParam.split(":")[1];
            if (funcName.equals("outputFile")) {
                outputFile = funcParam;
            } else if (funcName.equals("verbose")) {
                verbose = Boolean.parseBoolean(funcParam.toLowerCase());
            } else if (funcName.equals("epochPeriod")) {
                epochPeriod = Integer.parseInt(funcParam);
            } else if (funcName.equals("timeFormat")) {
                timeFormat = DateTimeFormatter.ofPattern(funcParam);
            } else if (funcName.equals("filter")) {
                // "filter:false" disables low-pass filtering entirely.
                if (!Boolean.parseBoolean(funcParam.toLowerCase())) {
                    filter = null;
                }
            } else if (funcName.equals("startEpochWholeMinute")) {
                startEpochWholeMinute = Boolean.parseBoolean(
                        funcParam.toLowerCase());
            } else if (funcName.equals("startEpochWholeSecond")) {
                startEpochWholeSecond = Boolean.parseBoolean(
                        funcParam.toLowerCase());
            } else if (funcName.equals("getStationaryBouts")) {
                // Stationary-bout detection forces a 10s epoch.
                getStationaryBouts = Boolean.parseBoolean(
                        funcParam.toLowerCase());
                epochPeriod = 10;
            } else if (funcName.equals("stationaryStd")) {
                stationaryStd = Double.parseDouble(funcParam);
            } else if (funcName.equals("xIntercept")) {
                swIntercept[0] = Double.parseDouble(funcParam);
            } else if (funcName.equals("yIntercept")) {
                swIntercept[1] = Double.parseDouble(funcParam);
            } else if (funcName.equals("zIntercept")) {
                swIntercept[2] = Double.parseDouble(funcParam);
            } else if (funcName.equals("xSlope")) {
                swSlope[0] = Double.parseDouble(funcParam);
            } else if (funcName.equals("ySlope")) {
                swSlope[1] = Double.parseDouble(funcParam);
            } else if (funcName.equals("zSlope")) {
                swSlope[2] = Double.parseDouble(funcParam);
            } else if (funcName.equals("xTemp")) {
                tempCoef[0] = Double.parseDouble(funcParam);
            } else if (funcName.equals("yTemp")) {
                tempCoef[1] = Double.parseDouble(funcParam);
            } else if (funcName.equals("zTemp")) {
                tempCoef[2] = Double.parseDouble(funcParam);
            } else if (funcName.equals("meanTemp")) {
                meanTemp = Double.parseDouble(funcParam);
            } else if (funcName.equals("range")) {
                range = Integer.parseInt(funcParam);
            }
        }
    }
    //process file if input parameters are all ok
    writeCwaEpochs(accFile, outputFile, verbose, epochPeriod, timeFormat,
            startEpochWholeMinute, startEpochWholeSecond, range, swIntercept,
            swSlope, tempCoef, meanTemp, getStationaryBouts, stationaryStd,
            filter);
}
/**
* Read CWA file blocks, then call method to write epochs from raw data.
* Epochs will be written to path "outputFile".
*/
/**
 * Reads a CWA file 512 bytes at a time, dispatching metadata ("MD") and data
 * ("AX") pages, and streams epoch summary lines to outputFile. Any exception
 * aborts with a message and System.exit(0) (historical behavior).
 */
private static void writeCwaEpochs(
        String accFile,
        String outputFile,
        Boolean verbose,
        int epochPeriod,
        DateTimeFormatter timeFormat,
        Boolean startEpochWholeMinute,
        Boolean startEpochWholeSecond,
        int range,
        double[] swIntercept,
        double[] swSlope,
        double[] tempCoef,
        double meanTemp,
        Boolean getStationaryBouts,
        double staticStd,
        LowpassFilter filter) {
    //file read/write objects
    FileChannel rawAccReader = null;
    BufferedWriter epochFileWriter = null;
    // CWA pages are fixed 512-byte blocks.
    int bufSize = 512;
    ByteBuffer buf = ByteBuffer.allocate(bufSize);
    try {
        rawAccReader = new FileInputStream(accFile).getChannel();
        epochFileWriter = new BufferedWriter(new FileWriter(outputFile));
        //data block support variables
        String header = "";
        //epoch creation support variables
        LocalDateTime epochStartTime = null;
        List<Long> timeVals = new ArrayList<Long>();
        List<Double> xVals = new ArrayList<Double>();
        List<Double> yVals = new ArrayList<Double>();
        List<Double> zVals = new ArrayList<Double>();
        int[] errCounter = new int[]{0}; //store val if updated in other method
        int[] clipsCounter = new int[]{0, 0}; //before, after (calibration)
        String epochSummary = "";
        String epochHeader = "timestamp,accPA,xMean,yMean,zMean,";
        epochHeader += "xRange,yRange,zRange,xStd,yStd,zStd,temp,samples,";
        epochHeader += "dataErrors,clipsBeforeCalibr,clipsAfterCalibr";
        //now read every page in CWA file
        int pageCount = 0;
        long memSizePages = rawAccReader.size()/bufSize;
        while(rawAccReader.read(buf) != -1) {
            buf.flip();
            buf.order(ByteOrder.LITTLE_ENDIAN);
            // First two bytes of each page identify the block type.
            header = (char)buf.get() + "";
            header += (char)buf.get() + "";
            if(header.equals("MD")) {
                //Read first page (& data-block) to get time, temp,
                //measureFreq & start-epoch values
                //epochStartTime = parseHeader(buf,epochFileWriter);
                writeLine(epochFileWriter, epochHeader);
            } else if(header.equals("AX")) {
                //read each individual page block, and process epochs...
                epochStartTime = processDataBlockIdentifyEpochs(buf,
                        epochFileWriter, timeFormat, epochStartTime,
                        epochPeriod, timeVals, xVals, yVals, zVals,
                        range, errCounter, clipsCounter, swIntercept,
                        swSlope, tempCoef, meanTemp, getStationaryBouts,
                        staticStd, filter);
            }
            buf.clear();
            //option to provide status update to user...
            pageCount++;
            if(verbose && pageCount % 10000 == 0)
                System.out.print((pageCount*100/memSizePages) + "%\b\b\b");
        }
        rawAccReader.close();
        epochFileWriter.close();
    } catch (Exception excep) {
        String errorMessage = "error reading/writing file " + outputFile;
        errorMessage += ": " + excep.toString();
        System.out.println(errorMessage);
        System.exit(0);
    }
}
/**
 * Parses one "AX" data page: decodes raw x/y/z samples (packed 10-bit or
 * 16-bit), applies software calibration and clipping correction, accumulates
 * readings into the epoch buffers, and writes a summary line each time an
 * epoch of epochPeriod seconds completes. Returns the (possibly advanced)
 * epoch start time for the next page.
 *
 * Bug fix: in the 6-bytes-per-sample branch, errCounter[0] was incremented
 * unconditionally BEFORE the reads (counting every 16-bit sample as a data
 * error); the increment now happens in the catch block, mirroring the
 * 4-bytes-per-sample branch.
 */
private static LocalDateTime processDataBlockIdentifyEpochs(
        ByteBuffer buf,
        BufferedWriter epochWriter,
        DateTimeFormatter timeFormat,
        LocalDateTime epochStartTime,
        int epochPeriod,
        List<Long> timeVals,
        List<Double> xVals,
        List<Double> yVals,
        List<Double> zVals,
        int range,
        int[] errCounter,
        int[] clipsCounter,
        double[] swIntercept,
        double[] swSlope,
        double[] tempCoef,
        double meanTemp,
        Boolean getStationaryBouts,
        double staticStd,
        LowpassFilter filter) {
    //read block header items
    long blockTimestamp = getUnsignedInt(buf,14);// buf.getInt(14);
    int light = getUnsignedShort(buf,18);// buf.getShort(18);
    double temperature = (getUnsignedShort(buf,20)*150.0 - 20500) / 1000;
    short rateCode = (short)(buf.get(24) & 0xff);
    short numAxesBPS = (short)(buf.get(25) & 0xff);
    short timestampOffset = buf.getShort(26);
    int sampleCount = getUnsignedShort(buf, 28);// buf.getShort(28);
    //determine sample frequency
    double sampleFreq = 3200 / (1 << (15 - (rateCode & 15)));
    if (sampleFreq <= 0) {
        sampleFreq = 1;
    }
    double readingGap = 1.0 / sampleFreq;
    //calculate num bytes per sample...
    byte bytesPerSample = 4;
    int NUM_AXES_PER_SAMPLE = 3;
    if ((numAxesBPS & 0x0f) == 2) {
        bytesPerSample = 6; // 3*16-bit
    } else if ((numAxesBPS & 0x0f) == 0) {
        bytesPerSample = 4; // 3*10-bit + 2
    }
    //determine block start time (timestampOffset shifts it by whole samples)
    LocalDateTime blockTime = getCwaTimestamp((int)blockTimestamp);
    float offsetStart = (float)-timestampOffset / (float)sampleFreq;
    blockTime = blockTime.plusNanos(secs2Nanos(offsetStart));
    //set target epoch start time of very first block
    if(epochStartTime==null) {
        epochStartTime=getCwaTimestamp((int)blockTimestamp);
        epochStartTime = epochStartTime.plusNanos(secs2Nanos(offsetStart));
    }
    //raw reading values
    long value = 0; // x/y/z vals
    short xRaw = 0;
    short yRaw = 0;
    short zRaw = 0;
    double x = 0.0;
    double y = 0.0;
    double z = 0.0;
    double mcTemp = temperature-meanTemp; //mean centred temperature
    //loop through each line in data block & check if it is last in epoch
    //then write epoch summary to file
    //an epoch will have a start+end time, and be of fixed duration
    int currentPeriod;
    Boolean isClipped = false;
    for (int i = 0; i<sampleCount; i++) {
        if (bytesPerSample == 4) {
            try {
                value = getUnsignedInt(buf, 30 +4*i);
            } catch (Exception excep) {
                errCounter[0] += 1;
                System.err.println("xyz reading err: " + excep.toString());
                break; //rest of block/page could be corrupted
            }
            // Sign-extend 10-bit values, adjust for exponents
            xRaw = (short)((short)(0xffffffc0 & (value << 6)) >> (6 - ((value >> 30) & 0x03)));
            yRaw = (short)((short)(0xffffffc0 & (value >> 4)) >> (6 - ((value >> 30) & 0x03)));
            zRaw = (short)((short)(0xffffffc0 & (value >> 14)) >> (6 - ((value >> 30) & 0x03)));
        } else if (bytesPerSample == 6) {
            try {
                xRaw = buf.getShort(30 + 2 * NUM_AXES_PER_SAMPLE * i + 0);
                yRaw = buf.getShort(30 + 2 * NUM_AXES_PER_SAMPLE * i + 2);
                zRaw = buf.getShort(30 + 2 * NUM_AXES_PER_SAMPLE * i + 4);
            } catch (Exception excep) {
                // FIXED: count the error only when a read actually fails.
                errCounter[0] += 1;
                System.err.println("xyz reading err: " + excep.toString());
                break; //rest of block/page could be corrupted
            }
        } else {
            xRaw = 0;
            yRaw = 0;
            zRaw = 0;
        }
        // Raw units are 1/256 g.
        x = xRaw / 256.0;
        y = yRaw / 256.0;
        z = zRaw / 256.0;
        //check if any clipping present, use ==range as it's clipped here
        if(x<=-range || x>=range || y<=-range || y>=range || z<=-range || z>=range){
            clipsCounter[0] += 1;
            isClipped = true;
        }
        //update values to software calibrated values
        x = swIntercept[0] + x*swSlope[0] + mcTemp*tempCoef[0];
        y = swIntercept[1] + y*swSlope[1] + mcTemp*tempCoef[1];
        z = swIntercept[2] + z*swSlope[2] + mcTemp*tempCoef[2];
        //check if any new clipping has happened
        //find crossing of range threshold so use < rather than ==
        if(x<-range || x>range || y<-range || y>range || z<-range || z>range){
            if (!isClipped)
                clipsCounter[1] += 1;
            //drag post calibration clipped values back to range limit
            if (x<-range || (isClipped && x<0))
                x = -range;
            else if (x>range || (isClipped && x>0))
                x = range;
            if (y<-range || (isClipped && y<0))
                y = -range;
            else if (y>range || (isClipped && y>0))
                y = range;
            if (z<-range || (isClipped && z<0))
                z = -range;
            else if (z>range || (isClipped && z>0))
                z = range;
        }
        currentPeriod = (int)Duration.between(epochStartTime,blockTime).getSeconds();
        //check for an interrupt, i.e. where break in values > 2 * epochPeriod
        if (currentPeriod >= epochPeriod*2) {
            int epochDiff = currentPeriod/epochPeriod;
            epochStartTime = epochStartTime.plusSeconds(epochPeriod*epochDiff);
            // NOTE(review): this recomputes the offset from MILLI_OF_SECOND
            // only (always < 1s) — looks like it should be a full Duration
            // between the two instants; confirm intent before changing.
            currentPeriod = (int) ((blockTime.get(ChronoField.MILLI_OF_SECOND) -
                    epochStartTime.get(ChronoField.MILLI_OF_SECOND))/1000);
        }
        //check we have collected enough values to form an epoch
        if (currentPeriod >= epochPeriod){
            //resample values to epochSec * (intended) sampleRate
            long[] timeResampled = new long[epochPeriod * (int)sampleFreq];
            for(int c=0; c<timeResampled.length; c++){
                // NOTE(review): the 10ms step assumes a 100Hz target rate;
                // confirm for other sampleFreq settings.
                timeResampled[c] = timeVals.get(0) + (10*c);
            }
            double[] xResampled = new double[timeResampled.length];
            double[] yResampled = new double[timeResampled.length];
            double[] zResampled = new double[timeResampled.length];
            Resample.interpLinear(timeVals, xVals, yVals, zVals,
                    timeResampled, xResampled, yResampled, zResampled);
            //epoch variables
            String epochSummary = "";
            double accPA = 0;
            double xMean = 0;
            double yMean = 0;
            double zMean = 0;
            double xRange = 0;
            double yRange = 0;
            double zRange = 0;
            double xStd = 0;
            double yStd = 0;
            double zStd = 0;
            //calculate raw x/y/z summary values
            xMean = mean(xResampled);
            yMean = mean(yResampled);
            zMean = mean(zResampled);
            xRange = range(xResampled);
            yRange = range(yResampled);
            zRange = range(zResampled);
            xStd = std(xResampled, xMean);
            yStd = std(yResampled, yMean);
            zStd = std(zResampled, zMean);
            //see if values have been abnormally stuck this epoch
            double stuckVal = 1.5;
            if (xStd==0 && (xMean<-stuckVal || xMean>stuckVal))
                errCounter[0] += 1;
            if (yStd==0 && (yMean<-stuckVal || yMean>stuckVal))
                errCounter[0] += 1;
            if (zStd==0 && (zMean<-stuckVal || zMean>stuckVal))
                errCounter[0] += 1;
            //calculate summary vector magnitude based metrics
            List<Double> paVals = new ArrayList<Double>();
            if(!getStationaryBouts) {
                for(int c=0; c<xResampled.length; c++){
                    x = xResampled[c];
                    y = yResampled[c];
                    z = zResampled[c];
                    if(!Double.isNaN(x)) {
                        double vm = getVectorMagnitude(x,y,z);
                        paVals.add(vm-1);
                    }
                }
                //filter AvgVm-1 values
                if (filter != null) {
                    filter.filter(paVals);
                }
                //run abs() or trunc() on summary variables after filtering
                trunc(paVals); //abs(paVals)
                //calculate mean values for each outcome metric
                accPA = mean(paVals);
            }
            //write summary values to file
            epochSummary = epochStartTime.format(timeFormat);
            epochSummary += "," + accPA;
            epochSummary += "," + xMean + "," + yMean + "," + zMean;
            epochSummary += "," + xRange + "," + yRange + "," + zRange;
            epochSummary += "," + xStd + "," + yStd + "," + zStd;
            epochSummary += "," + temperature + "," + timeVals.size();
            epochSummary += "," + errCounter[0];
            epochSummary += "," + clipsCounter[0] + "," + clipsCounter[1];
            if(!getStationaryBouts ||
                    (xStd<staticStd && yStd<staticStd && zStd<staticStd)) {
                writeLine(epochWriter, epochSummary);
            }
            //reset target start time and reset arrays for next epoch
            epochStartTime = epochStartTime.plusSeconds(epochPeriod);
            timeVals.clear();
            xVals.clear();
            yVals.clear();
            zVals.clear();
            errCounter[0] = 0;
            clipsCounter[0] = 0;
            clipsCounter[1] = 0;
        }
        //store axes and vector magnitude values for every reading
        timeVals.add(Duration.between(epochStartTime, blockTime).toMillis());
        xVals.add(x);
        yVals.add(y);
        zVals.add(z);
        isClipped = false;
        //System.out.println(blockTime.format(timeFormat) + "," + x + "," + y + "," + z);
        blockTime = blockTime.plusNanos(secs2Nanos(readingGap));
    }
    return epochStartTime;
}
/**
 * Parses the "MD" metadata block of a CWA file and returns the recorded
 * start time. Currently unused: the only call site is commented out in
 * writeCwaEpochs.
 */
private static LocalDateTime parseHeader(
        ByteBuffer buf,
        BufferedWriter epochWriter) {
    //todo ideally return estimate of file size...
    //deviceId = getUnsignedShort(buf,4);// buf.getShort(4);
    //sessionId = getUnsignedInt(buf,6);// buf.getInt(6);
    //sequenceId = getUnsignedInt(buf,10);// buf.getInt(10);
    // NOTE(review): reads offset 13 while the trailing comment says 14 — one
    // of the two is wrong; confirm against the CWA header layout before use.
    long startTimestamp = getUnsignedInt(buf,13);// buf.getInt(14);
    System.out.println(startTimestamp);
    return getCwaTimestamp((int)startTimestamp);
    //return memorySizePages;
}
//credit for next 2 methods goes to:
/** Reads the 32-bit value at {@code position} as an unsigned int (0..2^32-1). */
private static long getUnsignedInt(ByteBuffer bb, int position) {
    // Widen to long, then mask so the int's sign bit is not propagated.
    long raw = bb.getInt(position);
    return raw & 0xffffffffL;
}
/** Reads the 16-bit value at {@code position} as an unsigned short (0..65535). */
private static int getUnsignedShort(ByteBuffer bb, int position) {
    // Mask off the sign extension that widening a short to int introduces.
    int raw = bb.getShort(position);
    return raw & 0xffff;
}
/**
 * Unpacks a CWA 32-bit timestamp. Bit layout (high to low):
 * 6 bits year-2000, 4 bits month, 5 bits day, 5 bits hours,
 * 6 bits minutes, 6 bits seconds.
 */
private static LocalDateTime getCwaTimestamp(int cwaTimestamp) {
    int secs  = cwaTimestamp & 0x3f;
    int mins  = (cwaTimestamp >> 6) & 0x3f;
    int hours = (cwaTimestamp >> 12) & 0x1f;
    int day   = (cwaTimestamp >> 17) & 0x1f;
    int month = (cwaTimestamp >> 22) & 0x0f;
    int year  = ((cwaTimestamp >> 26) & 0x3f) + 2000;
    return LocalDateTime.of(year, month, day, hours, mins, secs);
}
/** Euclidean norm of the acceleration vector. */
private static double getVectorMagnitude(double x, double y, double z) {
    double sumOfSquares = x * x + y * y + z * z;
    return Math.sqrt(sumOfSquares);
}
/** Replaces each element with its absolute value, in place. (Currently unused.) */
private static void abs(List<Double> vals) {
    for (int idx = vals.size() - 1; idx >= 0; idx--) {
        vals.set(idx, Math.abs(vals.get(idx)));
    }
}
/** Clamps negative elements to zero, in place (positive values untouched). */
private static void trunc(List<Double> vals) {
    for (int idx = 0; idx < vals.size(); idx++) {
        double v = vals.get(idx);
        vals.set(idx, (v < 0) ? 0 : v);
    }
}
/**
 * Sums the array, skipping NaN entries; an empty array yields NaN
 * (which then propagates through mean/std).
 */
private static double sum(double[] vals) {
    if (vals.length == 0) {
        return Double.NaN;
    }
    double total = 0;
    for (double v : vals) {
        if (!Double.isNaN(v)) {
            total += v;
        }
    }
    return total;
}
/**
 * Arithmetic mean; NaN for an empty array. Note: divides by the full length
 * even though sum() skips NaN entries.
 */
private static double mean(double[] vals) {
    int n = vals.length;
    if (n == 0) {
        return Double.NaN;
    }
    return sum(vals) / n;
}
/** Arithmetic mean of the list; NaN when empty. */
private static double mean(List<Double> vals) {
    int n = vals.size();
    if (n == 0) {
        return Double.NaN;
    }
    return sum(vals) / n;
}
/**
 * Sums the list; NaN when empty. Unlike the array overload, NaN elements
 * are NOT skipped here (preserved behavior).
 */
private static double sum(List<Double> vals) {
    if (vals.isEmpty()) {
        return Double.NaN;
    }
    double total = 0;
    for (Double v : vals) {
        total += v;
    }
    return total;
}
/**
 * Returns the spread (max - min) of the array, or NaN for an empty array.
 *
 * <p>Bug fixes relative to the previous version:
 * <ul>
 *   <li>The min/max updates are now independent checks. With the old
 *       if/else-if chain, an element that updated the minimum could never
 *       also be considered for the maximum, so (for example) a maximum at
 *       index 0 was always missed.</li>
 *   <li>The max accumulator is seeded with -Double.MAX_VALUE.
 *       Double.MIN_VALUE is the smallest POSITIVE double, which produced
 *       wrong results for arrays whose true maximum is negative and for
 *       single-element arrays.</li>
 * </ul>
 */
private static double range(double[] vals) {
    if (vals.length == 0) {
        return Double.NaN;
    }
    double min = Double.MAX_VALUE;
    double max = -Double.MAX_VALUE;
    for (double v : vals) {
        if (v < min) {
            min = v;
        }
        if (v > max) {
            max = v;
        }
    }
    return max - min;
}
// Population standard deviation about the supplied mean; NaN for an empty
// array. NaN entries contribute nothing to the variance numerator, but the
// divisor is still the full array length.
private static double std(double[] vals, double mean) {
    if (vals.length == 0) {
        return Double.NaN;
    }
    final double n = vals.length * 1.0;
    double variance = 0;
    for (double v : vals) {
        if (!Double.isNaN(v)) {
            variance += ((v - mean) * (v - mean)) / n;
        }
    }
    return Math.sqrt(variance);
}
// Best-effort write of one newline-terminated line; failures are reported
// on stdout instead of being propagated to the caller.
private static void writeLine(BufferedWriter fileWriter, String line) {
    try {
        fileWriter.write(line.concat("\n"));
    } catch (Exception e) {
        System.out.println(e.toString());
    }
}
// Converts seconds to nanoseconds as an int. Values above roughly 2.147
// seconds saturate at Integer.MAX_VALUE under the narrowing cast, so this
// is only meaningful for sub-second durations.
private static int secs2Nanos(double num) {
    final double nanos = TimeUnit.SECONDS.toNanos(1) * num;
    return (int) nanos;
}
}
|
package com.vk.api.sdk.queries.upload;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonSyntaxException;
import com.google.gson.stream.JsonReader;
import com.vk.api.sdk.client.ApiRequest;
import com.vk.api.sdk.client.ClientResponse;
import com.vk.api.sdk.client.VkApiClient;
import com.vk.api.sdk.exceptions.ApiException;
import com.vk.api.sdk.exceptions.ClientException;
import com.vk.api.sdk.exceptions.UploadException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.Type;
/**
 * Base builder for VK media-upload requests. Posts an optional file payload
 * to the upload URL obtained from the API and converts the JSON reply into
 * the typed response {@code R}.
 *
 * @param <T> the concrete builder type, returned by {@link #getThis()} for
 *            fluent chaining
 * @param <R> the response type produced by {@link #execute()}
 */
public abstract class UploadQueryBuilder<T, R> extends ApiRequest<R> {
    private static final Logger LOG = LoggerFactory.getLogger(UploadQueryBuilder.class);

    /** Form-field name the file is posted under. */
    private String filename;
    /** File payload; when {@code null} the request is posted without a body. */
    private File file;

    public UploadQueryBuilder(VkApiClient client, String uploadUrl, String filename, Type type) {
        super(uploadUrl, client.getTransportClient(), client.getGson(), 0, type);
        this.filename = filename;
    }

    /** Sets the file to upload and returns this builder for chaining. */
    public T file(File value) {
        this.file = value;
        return getThis();
    }

    /** Returns {@code this}, typed as the concrete subclass. */
    protected abstract T getThis();

    @Override
    public R execute() throws ApiException, ClientException {
        final String textResponse = executeAsString();
        final JsonReader reader = new JsonReader(new StringReader(textResponse));
        final JsonObject json = (JsonObject) new JsonParser().parse(reader);
        // The upload endpoint signals failure with an "error" member rather
        // than an HTTP error status.
        if (json.has("error")) {
            final UploadException uploadException = new UploadException(0, textResponse, "");
            LOG.error("API error", uploadException);
            throw uploadException;
        }
        try {
            return getGson().fromJson(json, getResponseClass());
        } catch (JsonSyntaxException e) {
            LOG.error("Invalid JSON: " + textResponse, e);
            throw new ClientException("Can't parse json response");
        }
    }

    @Override
    public String executeAsString() throws ClientException {
        ClientResponse response;
        try {
            response = (file != null)
                    ? getClient().post(getUrl(), filename, file)
                    : getClient().post(getUrl());
        } catch (IOException e) {
            LOG.error("Problems with request: " + getUrl(), e);
            throw new ClientException("I/O exception");
        }
        if (response.getStatusCode() != 200) {
            LOG.error("Invalid HTTP status " + response.getStatusCode() + " from " + getUrl());
            throw new ClientException("Internal API server error");
        }
        return response.getContent();
    }

    /** Upload requests carry no regular API body. */
    @Override
    protected String getBody() {
        throw new UnsupportedOperationException("not supported");
    }
}
|
package net.cpollet.itinerants.messaging.context;
import lombok.extern.slf4j.Slf4j;
import net.cpollet.itinerants.messaging.configuration.RabbitmqConfiguration;
import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
import org.springframework.amqp.rabbit.connection.ConnectionFactory;
import org.springframework.amqp.rabbit.connection.ConnectionNameStrategy;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.support.converter.Jackson2JsonMessageConverter;
import org.springframework.boot.autoconfigure.amqp.RabbitProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.net.InetAddress;
import java.net.UnknownHostException;
@Configuration
@Slf4j
public class MessagingContext {
    /**
     * Caching RabbitMQ connection factory configured from Spring Boot's
     * {@link RabbitProperties} (host, port, credentials) and named via the
     * supplied {@link ConnectionNameStrategy}.
     */
    @SuppressWarnings("Duplicates")
    @Bean
    ConnectionFactory connectionFactory(ConnectionNameStrategy connectionNameStrategy,
                                        RabbitProperties rabbitProperties) {
        CachingConnectionFactory connectionFactory = new CachingConnectionFactory(rabbitProperties.getHost(), rabbitProperties.getPort());
        connectionFactory.setUsername(rabbitProperties.getUsername());
        connectionFactory.setPassword(rabbitProperties.getPassword());
        connectionFactory.setConnectionNameStrategy(connectionNameStrategy);
        return connectionFactory;
    }

    /** Names AMQP connections "application@host" for easier broker-side debugging. */
    @Bean
    ConnectionNameStrategy connectionNameStrategy(String hostname, String applicationName) {
        return connectionFactory -> applicationName + "@" + hostname;
    }

    /**
     * {@link RabbitTemplate} bound to the configured exchange, serializing
     * message payloads as JSON.
     */
    @Bean
    public RabbitTemplate rabbitTemplate(RabbitmqConfiguration rabbitmqConfiguration, ConnectionFactory connectionFactory) {
        RabbitTemplate rabbitTemplate = new RabbitTemplate(connectionFactory);
        rabbitTemplate.setExchange(rabbitmqConfiguration.getExchangeName());
        rabbitTemplate.setMessageConverter(new Jackson2JsonMessageConverter());
        return rabbitTemplate;
    }

    /** Local hostname, falling back to "unknown" when resolution fails. */
    @Bean
    String hostname() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            // Fix: pass the exception to the logger so the root cause of the
            // resolution failure is not silently dropped.
            log.warn("Could not determine hostname", e);
            return "unknown";
        }
    }
}
|
package org.jboss.marshalling.serial;
import org.jboss.marshalling.AbstractMarshaller;
import org.jboss.marshalling.Marshaller;
import org.jboss.marshalling.AbstractMarshallerFactory;
import org.jboss.marshalling.MarshallingConfiguration;
import org.jboss.marshalling.ByteOutput;
import org.jboss.marshalling.Externalizer;
import org.jboss.marshalling.UTFUtils;
import org.jboss.marshalling.ObjectTable;
import org.jboss.marshalling.ClassTable;
import org.jboss.marshalling.util.IdentityIntMap;
import org.jboss.marshalling.util.FieldPutter;
import org.jboss.marshalling.util.Kind;
import org.jboss.marshalling.reflect.SerializableClassRegistry;
import org.jboss.marshalling.reflect.SerializableClass;
import org.jboss.marshalling.reflect.SerializableField;
import java.io.IOException;
import java.io.ObjectStreamClass;
import java.io.Serializable;
import java.io.Externalizable;
import java.io.NotSerializableException;
import java.io.InvalidClassException;
import java.io.InvalidObjectException;
import java.util.IdentityHashMap;
import java.util.Map;
import java.lang.reflect.Proxy;
import java.lang.reflect.Field;
import java.security.PrivilegedExceptionAction;
import java.security.AccessController;
import java.security.PrivilegedActionException;
/**
 * A {@link Marshaller} that emits the standard Java serialization stream
 * format (protocol version 5), layered over the jboss-marshalling
 * configuration machinery: object/class tables, class/object resolvers,
 * externalizer factories, and a {@link SerializableClassRegistry} for
 * writeObject/writeReplace metadata. Instances are not thread-safe; one
 * instance drives one output stream at a time.
 */
public final class SerialMarshaller extends AbstractMarshaller implements Marshaller, ExtendedObjectStreamConstants {

    /** Lower bound for the block-data buffer handed to the {@link BlockMarshaller}. */
    private static final int MIN_BUFFER_SIZE = 16;

    /** Source of per-class serialization metadata (fields, writeObject, writeReplace). */
    private final SerializableClassRegistry registry;
    /** Object -> stream handle index, for emitting TC_REFERENCE back-references. */
    private final IdentityIntMap<Object> instanceCache;
    /** Class -> stream handle index of its already-written class descriptor. */
    private final IdentityIntMap<Class<?>> descriptorCache;
    /** Original object -> replacement produced by writeReplace()/objectResolver. */
    private final IdentityHashMap<Object, Object> replacementCache;
    /** Class -> configured externalizer; a cached null value means "none configured". */
    private final IdentityHashMap<Class<?>, Externalizer> externalizers;
    private final int bufferSize;
    /** Lazily created stream handed to user-defined writeObject() methods. */
    private SerialObjectOutputStream oos;
    private BlockMarshaller blockMarshaller;
    /** Next handle index to assign; reset by {@link #clearInstanceCache()}. */
    private int instanceSeq;

    /**
     * Creates a marshaller for the given configuration. Only protocol
     * version 5 (the standard Java serialization stream version) can be
     * written.
     */
    SerialMarshaller(final AbstractMarshallerFactory marshallerFactory, final SerializableClassRegistry registry, final MarshallingConfiguration configuration) throws IOException {
        super(marshallerFactory, configuration);
        if (configuredVersion != 5) {
            throw new IOException("Only protocol version 5 is supported for writing");
        }
        this.registry = registry;
        instanceCache = new IdentityIntMap<Object>(configuration.getInstanceCount());
        descriptorCache = new IdentityIntMap<Class<?>>(configuration.getClassCount());
        replacementCache = new IdentityHashMap<Object, Object>(configuration.getInstanceCount());
        externalizers = new IdentityHashMap<Class<?>, Externalizer>(configuration.getClassCount());
        bufferSize = configuration.getBufferSize();
    }

    /**
     * Writes one object in serialization wire format. Handles, in order:
     * null, cached back-references, object-table objects, Class and
     * ObjectStreamClass, writeReplace()/objectResolver replacement, strings,
     * enums, arrays, externalized and Externalizable objects, and plain
     * Serializable objects.
     */
    protected void doWriteObject(final Object orig, final boolean unshared) throws IOException {
        if (orig == null) {
            write(TC_NULL);
            return;
        }
        final IdentityHashMap<Object, Object> replacementCache = this.replacementCache;
        Object obj;
        if (replacementCache.containsKey(orig)) {
            obj = replacementCache.get(orig);
        } else {
            obj = orig;
        }
        final IdentityIntMap<Object> instanceCache = this.instanceCache;
        int v;
        final ObjectTable.Writer writer;
        // - first check for cached objects, Classes, or ObjectStreamClass
        if (! unshared && (v = instanceCache.get(obj, -1)) != -1) {
            write(TC_REFERENCE);
            writeInt(v + baseWireHandle);
            return;
        } else if ((writer = objectTable.getObjectWriter(obj)) != null) {
            write(TC_OBJECTTABLE);
            final int id = instanceSeq++;
            if (! unshared) instanceCache.put(obj, id);
            writer.writeObject(blockMarshaller, obj);
            doEndBlock();
            return;
        } else if (obj instanceof Class) {
            write(TC_CLASS);
            writeNewClassDescFor((Class<?>)obj);
            final int id = instanceSeq++;
            if (! unshared) instanceCache.put(obj, id);
            return;
        } else if (obj instanceof ObjectStreamClass) {
            throw new NotSerializableException(ObjectStreamClass.class.getName());
        }
        // - next check for replacements
        // - first, check for implemented writeReplace method on the object
        final SerializableClassRegistry registry = this.registry;
        for (;;) {
            final Class<? extends Object> objClass = obj.getClass();
            final SerializableClass sc = registry.lookup(objClass);
            if (!sc.hasWriteReplace()) {
                break;
            }
            final Object replacement = sc.callWriteReplace(obj);
            try {
                // Fix: the old test compared obj.getClass() == objClass, which
                // is trivially true here (obj has not changed yet), so the loop
                // always terminated after a single replacement step. Matching
                // ObjectOutputStream semantics, chaining stops only when the
                // replacement's class equals the current class.
                if (replacement == null || replacement == obj || replacement.getClass() == objClass) {
                    break;
                }
            } finally {
                obj = replacement;
            }
        }
        obj = objectResolver.writeReplace(obj);
        // - next, if the object was replaced we do our original checks again...
        if (obj != orig) {
            // cache the replacement
            replacementCache.put(orig, obj);
            if (obj == null) {
                write(TC_NULL);
                return;
            } else if (! unshared && (v = instanceCache.get(obj, -1)) != -1) {
                write(TC_REFERENCE);
                // Fix: stream handles are offset by baseWireHandle; the old
                // code wrote the raw index here, unlike the identical
                // TC_REFERENCE path above, producing an invalid reference.
                writeInt(v + baseWireHandle);
                return;
            } else if (obj instanceof Class) {
                write(TC_CLASS);
                writeNewClassDescFor((Class<?>)obj);
                final int id = instanceSeq++;
                if (! unshared) instanceCache.put(obj, id);
                return;
            } else if (obj instanceof ObjectStreamClass) {
                throw new NotSerializableException(ObjectStreamClass.class.getName());
            }
        }
        // - next check for other special types
        if (obj instanceof String) {
            final String string = (String) obj;
            final long len = UTFUtils.getLongUTFLength(string);
            // Short strings fit the 16-bit length of TC_STRING; longer ones
            // need TC_LONGSTRING with a 64-bit length.
            if (len < 65536L) {
                write(TC_STRING);
                final int id = instanceSeq++;
                if (! unshared) instanceCache.put(obj, id);
                writeShort((int) len);
                UTFUtils.writeUTFBytes(this, string);
                return;
            } else {
                write(TC_LONGSTRING);
                final int id = instanceSeq++;
                if (! unshared) instanceCache.put(obj, id);
                writeLong(len);
                UTFUtils.writeUTFBytes(this, string);
                return;
            }
        }
        final Class<?> objClass = obj.getClass();
        if (obj instanceof Enum) {
            write(TC_ENUM);
            final Enum theEnum = (Enum) obj;
            writeClassDescFor(theEnum.getDeclaringClass());
            final int id = instanceSeq++;
            if (! unshared) instanceCache.put(obj, id);
            doWriteObject(theEnum.name(), false);
            return;
        } else if (objClass.isArray()) {
            write(TC_ARRAY);
            writeClassDescFor(objClass);
            final int id = instanceSeq++;
            if (! unshared) instanceCache.put(obj, id);
            if (obj instanceof byte[]) {
                final byte[] bytes = (byte[]) obj;
                writeInt(bytes.length);
                write(bytes);
            } else if (obj instanceof short[]) {
                final short[] shorts = (short[]) obj;
                writeInt(shorts.length);
                for (short s : shorts) {
                    writeShort(s);
                }
            } else if (obj instanceof int[]) {
                final int[] ints = (int[]) obj;
                writeInt(ints.length);
                for (int s : ints) {
                    writeInt(s);
                }
            } else if (obj instanceof long[]) {
                final long[] longs = (long[]) obj;
                writeInt(longs.length);
                for (long s : longs) {
                    writeLong(s);
                }
            } else if (obj instanceof float[]) {
                final float[] floats = (float[]) obj;
                writeInt(floats.length);
                for (float s : floats) {
                    writeFloat(s);
                }
            } else if (obj instanceof double[]) {
                final double[] doubles = (double[]) obj;
                writeInt(doubles.length);
                for (double s : doubles) {
                    writeDouble(s);
                }
            } else if (obj instanceof boolean[]) {
                final boolean[] booleans = (boolean[]) obj;
                writeInt(booleans.length);
                for (boolean s : booleans) {
                    writeBoolean(s);
                }
            } else if (obj instanceof char[]) {
                final char[] chars = (char[]) obj;
                writeInt(chars.length);
                for (char s : chars) {
                    writeChar(s);
                }
            } else {
                final Object[] objs = (Object[]) obj;
                writeInt(objs.length);
                for (Object o : objs) {
                    doWriteObject(o, false);
                }
            }
            return;
        }
        // Externalizer lookup is cached per class; a stored null means the
        // factory was consulted and returned none.
        Externalizer externalizer;
        if (externalizers.containsKey(objClass)) {
            externalizer = externalizers.get(objClass);
        } else {
            externalizer = classExternalizerFactory.getExternalizer(objClass);
            externalizers.put(objClass, externalizer);
        }
        if (externalizer != null) {
            final ExternalizedObject eo = new ExternalizedObject(externalizer, obj);
            doWriteObject(eo, unshared);
            replacementCache.put(obj, eo);
            return;
        } else if (obj instanceof Externalizable) {
            write(TC_OBJECT);
            writeClassDescFor(objClass);
            final int id = instanceSeq++;
            if (! unshared) instanceCache.put(obj, id);
            final Externalizable externalizable = (Externalizable) obj;
            externalizable.writeExternal(blockMarshaller);
            doEndBlock();
            return;
        } else if (obj instanceof Serializable) {
            write(TC_OBJECT);
            writeClassDescFor(objClass);
            final int id = instanceSeq++;
            if (! unshared) instanceCache.put(obj, id);
            writeSerialData(objClass, obj);
            return;
        } else {
            throw new NotSerializableException(objClass.getName());
        }
    }

    /**
     * Writes the per-class field data for {@code obj}, superclass first,
     * honoring a user-defined writeObject() method when present.
     */
    private void writeSerialData(Class<?> objClass, Object obj) throws IOException {
        final Class<?> superClass = objClass.getSuperclass();
        // Fix: the recursion guard previously tested objClass, which is
        // always Serializable by the time this method is reached, so the
        // walk never stopped at a non-serializable ancestor. Per the
        // serialization contract, only Serializable superclasses have their
        // field data written.
        if (superClass != null && Serializable.class.isAssignableFrom(superClass)) {
            writeSerialData(superClass, obj);
        }
        final SerializableClass sc = registry.lookup(objClass);
        if (sc.hasWriteObject()) {
            // Save and restore the stream's per-object state around the user
            // callback so nested writeObject() calls do not corrupt it.
            final SerialObjectOutputStream oos = getObjectOutputStream();
            final Object oldObj = oos.saveCurrentObject(obj);
            final SerializableClass oldSc = oos.saveCurrentSerializableClass(sc);
            final Map<String,FieldPutter> map = oos.saveCurrentFieldMap();
            final SerialObjectOutputStream.State oldState = oos.saveState();
            try {
                sc.callWriteObject(obj, oos);
            } finally {
                oos.setCurrentObject(oldObj);
                oos.setCurrentSerializableClass(oldSc);
                oos.setCurrentFieldMap(map);
                oos.restoreState(oldState);
            }
            doEndBlock();
        } else {
            doWriteFields(sc, obj);
        }
    }

    private final PrivilegedExceptionAction<SerialObjectOutputStream> createObjectOutputStreamAction = new PrivilegedExceptionAction<SerialObjectOutputStream>() {
        public SerialObjectOutputStream run() throws IOException {
            return new SerialObjectOutputStream(SerialMarshaller.this, blockMarshaller);
        }
    };

    /** Constructs the user-facing stream inside a privileged block. */
    private SerialObjectOutputStream createObjectOutputStream() throws IOException {
        try {
            return AccessController.doPrivileged(createObjectOutputStreamAction);
        } catch (PrivilegedActionException e) {
            throw (IOException) e.getCause();
        }
    }

    /** Returns the lazily-created {@link SerialObjectOutputStream}. */
    private SerialObjectOutputStream getObjectOutputStream() throws IOException {
        if (oos == null) {
            oos = createObjectOutputStream();
        }
        return oos;
    }

    /**
     * Writes default field data for one class: all primitive fields first,
     * then all object fields, matching the descriptor order written by
     * {@link #writeNewPlainClassDesc(Class)}.
     */
    protected void doWriteFields(final SerializableClass info, final Object obj) throws IOException {
        final SerializableField[] serializableFields = info.getFields();
        for (SerializableField serializableField : serializableFields) {
            try {
                final Field field = serializableField.getField();
                switch (serializableField.getKind()) {
                    case BOOLEAN: {
                        writeBoolean(field.getBoolean(obj));
                        break;
                    }
                    case BYTE: {
                        writeByte(field.getByte(obj));
                        break;
                    }
                    case SHORT: {
                        writeShort(field.getShort(obj));
                        break;
                    }
                    case INT: {
                        writeInt(field.getInt(obj));
                        break;
                    }
                    case CHAR: {
                        writeChar(field.getChar(obj));
                        break;
                    }
                    case LONG: {
                        writeLong(field.getLong(obj));
                        break;
                    }
                    case DOUBLE: {
                        writeDouble(field.getDouble(obj));
                        break;
                    }
                    case FLOAT: {
                        writeFloat(field.getFloat(obj));
                        break;
                    }
                }
            } catch (IllegalAccessException e) {
                final InvalidObjectException ioe = new InvalidObjectException("Unexpected illegal access exception");
                ioe.initCause(e);
                throw ioe;
            }
        }
        for (SerializableField serializableField : serializableFields) {
            try {
                final Field field = serializableField.getField();
                Kind i = serializableField.getKind();
                if (i == Kind.OBJECT) {
                    doWriteObject(field.get(obj), serializableField.isUnshared());
                }
            } catch (IllegalAccessException e) {
                final InvalidObjectException ioe = new InvalidObjectException("Unexpected illegal access exception");
                ioe.initCause(e);
                throw ioe;
            }
        }
    }

    /** Writes a class descriptor, as a back-reference when already emitted. */
    private void writeClassDescFor(final Class<?> forClass) throws IOException {
        if (forClass == null) {
            write(TC_NULL);
        } else {
            final int id = descriptorCache.get(forClass, -1);
            if (id == -1) {
                writeNewClassDescFor(forClass);
            } else {
                write(TC_REFERENCE);
                writeInt(id + baseWireHandle);
            }
        }
    }

    /** Writes a fresh class descriptor, proxy or plain as appropriate. */
    private void writeNewClassDescFor(final Class<?> forClass) throws IOException {
        if (Proxy.isProxyClass(forClass)) {
            writeNewProxyClassDesc(forClass);
        } else {
            writeNewPlainClassDesc(forClass);
        }
    }

    /** Writes a TC_PROXYCLASSDESC record: interface names + annotation + super. */
    private void writeNewProxyClassDesc(final Class<?> forClass) throws IOException {
        write(TC_PROXYCLASSDESC);
        descriptorCache.put(forClass, instanceSeq++);
        final String[] names = classResolver.getProxyInterfaces(forClass);
        writeInt(names.length);
        for (String name : names) {
            writeUTF(name);
        }
        classResolver.annotateProxyClass(blockMarshaller, forClass);
        doEndBlock();
        writeClassDescFor(forClass.getSuperclass());
    }

    /**
     * Writes a plain class descriptor: either via the configured class table
     * (TC_CLASSTABLEDESC), or the full TC_CLASSDESC record with
     * serialVersionUID, flags, and the field list (primitives first, then
     * object fields, mirroring {@link #doWriteFields}).
     */
    private void writeNewPlainClassDesc(final Class<?> forClass) throws IOException {
        final ClassTable.Writer writer = classTable.getClassWriter(forClass);
        if (writer != null) {
            write(TC_CLASSTABLEDESC);
            descriptorCache.put(forClass, instanceSeq++);
            writer.writeClass(blockMarshaller, forClass);
            doEndBlock();
            return;
        }
        write(TC_CLASSDESC);
        writeUTF(classResolver.getClassName(forClass));
        descriptorCache.put(forClass, instanceSeq++);
        if (forClass.isEnum()) {
            // Enums always serialize with SVU 0 and no fields.
            writeLong(0L);
            write(SC_SERIALIZABLE | SC_ENUM);
            writeShort(0);
        } else if (Serializable.class.isAssignableFrom(forClass)) {
            final SerializableClass sc = registry.lookup(forClass);
            final long svu = sc.getEffectiveSerialVersionUID();
            writeLong(svu);
            if (Externalizable.class.isAssignableFrom(forClass)) {
                // todo: add a protocol_1 option?
                write(SC_EXTERNALIZABLE + SC_BLOCK_DATA);
                writeShort(0);
            } else {
                if (sc.hasWriteObject()) {
                    write(SC_WRITE_METHOD + SC_SERIALIZABLE);
                } else {
                    write(SC_SERIALIZABLE);
                }
                final SerializableField[] fields = sc.getFields();
                writeShort(fields.length);
                // first write primitive fields, then object fields
                for (SerializableField field : fields) {
                    final Kind kind = field.getKind();
                    final String name = field.getName();
                    final Class<?> type;
                    try {
                        type = field.getType();
                    } catch (ClassNotFoundException e) {
                        // not possible
                        throw new InvalidClassException(forClass.getName(), "Field " + name + "'s class was not found");
                    }
                    if (kind != Kind.OBJECT) {
                        write(primitives.get(type, -1));
                        writeUTF(name);
                    }
                }
                for (SerializableField field : fields) {
                    final Kind kind = field.getKind();
                    final String name = field.getName();
                    final Class<?> type;
                    try {
                        type = field.getType();
                    } catch (ClassNotFoundException e) {
                        // not possible
                        throw new InvalidClassException(forClass.getName(), "Field " + name + "'s class was not found");
                    }
                    if (kind == Kind.OBJECT) {
                        final String signature = getSignature(type).intern();
                        write(signature.charAt(0));
                        writeUTF(name);
                        writeObject(signature);
                    }
                }
            }
        } else {
            // Non-serializable class: SVU 0, no flags, no fields.
            writeLong(0L);
            write(0);
            writeShort(0);
        }
        classResolver.annotateClass(blockMarshaller, forClass);
        doEndBlock();
        // NOTE(review): sc is null when forClass is java.lang.Object; if a
        // descriptor for Object itself is ever written here this line NPEs --
        // confirm that cannot happen via the callers.
        final Class<?> sc = forClass.getSuperclass();
        if (Serializable.class.isAssignableFrom(sc) && ! forClass.isEnum()) {
            writeClassDescFor(sc);
        } else {
            write(TC_NULL);
        }
    }

    /** Flushes pending block data and terminates it with TC_ENDBLOCKDATA. */
    private void doEndBlock() throws IOException {
        blockMarshaller.flush();
        write(TC_ENDBLOCKDATA);
    }

    /** Primitive class -> JVM signature character, used in field descriptors. */
    private static final IdentityIntMap<Class<?>> primitives;
    static {
        primitives = new IdentityIntMap<Class<?>>(32);
        primitives.put(byte.class, 'B');
        primitives.put(char.class, 'C');
        primitives.put(double.class, 'D');
        primitives.put(float.class, 'F');
        primitives.put(int.class, 'I');
        primitives.put(long.class, 'J');
        primitives.put(short.class, 'S');
        primitives.put(boolean.class, 'Z');
        primitives.put(void.class, 'V');
    }

    /** Returns the JVM type signature for the given class. */
    private static String getSignature(final Class<?> type) {
        final int id;
        if ((id = primitives.get(type, -1)) != -1) {
            return Character.toString((char)id);
        } else if (type.isArray()) {
            return "[" + getSignature(type.getComponentType());
        } else {
            return "L" + type.getName().replace('.', '/') + ";";
        }
    }

    /**
     * Clears all per-stream caches, resets the handle sequence, and writes a
     * TC_RESET marker when a stream is active.
     */
    public void clearInstanceCache() throws IOException {
        instanceCache.clear();
        descriptorCache.clear();
        replacementCache.clear();
        externalizers.clear();
        instanceSeq = 0;
        if (byteOutput != null) {
            write(TC_RESET);
        }
    }

    /** In this format class descriptors share the instance caches. */
    public void clearClassCache() throws IOException {
        clearInstanceCache();
    }

    /** Starts a new stream, allocating the block-data marshaller. */
    public void start(final ByteOutput byteOutput) throws IOException {
        blockMarshaller = new BlockMarshaller(this, bufferSize < MIN_BUFFER_SIZE ? MIN_BUFFER_SIZE : bufferSize);
        super.start(byteOutput);
    }

    /** Writes the stream header's version short after the magic. */
    protected void doStart() throws IOException {
        super.doStart();
        writeShort(configuredVersion);
    }

    /** Finishes the stream and drops per-stream helpers. */
    public void finish() throws IOException {
        super.finish();
        blockMarshaller = null;
        oos = null;
    }

    /** Flushes pending block data before flushing the underlying output. */
    public void flush() throws IOException {
        final BlockMarshaller blockMarshaller = this.blockMarshaller;
        if (blockMarshaller != null) {
            blockMarshaller.flush();
        }
        super.flush();
    }
}
|
package io.spine.server.aggregate;
import io.spine.annotation.SPI;
import io.spine.core.BoundedContextName;
import io.spine.core.CommandClass;
import io.spine.core.CommandEnvelope;
import io.spine.core.CommandId;
import io.spine.core.Event;
import io.spine.core.EventClass;
import io.spine.core.EventEnvelope;
import io.spine.core.EventId;
import io.spine.core.TenantId;
import io.spine.server.BoundedContext;
import io.spine.server.aggregate.model.AggregateClass;
import io.spine.server.command.CaughtError;
import io.spine.server.command.CommandErrorHandler;
import io.spine.server.commandbus.CommandDispatcher;
import io.spine.server.delivery.Shardable;
import io.spine.server.delivery.ShardedStreamConsumer;
import io.spine.server.delivery.ShardingStrategy;
import io.spine.server.delivery.UniformAcrossTargets;
import io.spine.server.entity.EntityLifecycle;
import io.spine.server.entity.EventFilter;
import io.spine.server.entity.Repository;
import io.spine.server.event.EventBus;
import io.spine.server.event.EventDispatcherDelegate;
import io.spine.server.event.RejectionEnvelope;
import io.spine.server.route.CommandRouting;
import io.spine.server.route.EventRoute;
import io.spine.server.route.EventRouting;
import io.spine.server.stand.Stand;
import io.spine.server.storage.Storage;
import io.spine.server.storage.StorageFactory;
import java.util.Collection;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Suppliers.memoize;
import static com.google.common.collect.ImmutableList.of;
import static io.spine.option.EntityOption.Kind.AGGREGATE;
import static io.spine.server.aggregate.model.AggregateClass.asAggregateClass;
import static io.spine.server.tenant.TenantAwareRunner.with;
import static io.spine.util.Exceptions.newIllegalStateException;
/**
* The repository which manages instances of {@code Aggregate}s.
*
* @param <I> the type of the aggregate IDs
* @param <A> the type of the aggregates managed by this repository
* @apiNote This class is made {@code abstract} for preserving type information of aggregate ID and
* aggregate classes used by implementations.
*/
@SuppressWarnings({"ClassWithTooManyMethods", "OverlyCoupledClass"})
public abstract class AggregateRepository<I, A extends Aggregate<I, ?, ?>>
extends Repository<I, A>
implements CommandDispatcher<I>,
EventDispatcherDelegate<I>,
Shardable {
/** The default number of events to be stored before a next snapshot is made. */
static final int DEFAULT_SNAPSHOT_TRIGGER = 100;
/** The routing schema for commands handled by the aggregates. */
private final CommandRouting<I> commandRouting = CommandRouting.newInstance();
/** The routing schema for events to which aggregates react. */
private final EventRouting<I> eventRouting =
EventRouting.withDefault(EventRoute.byProducerId());
/**
* The routing for event import, which by default obtains the target aggregate ID as the
* {@linkplain io.spine.core.EventContext#getProducerId() producer ID} of the event.
*/
private final EventRouting<I> eventImportRoute =
EventRouting.withDefault(EventRoute.byProducerId());
private final Supplier<AggregateCommandDelivery<I, A>> commandDeliverySupplier =
memoize(this::createCommandDelivery);
private final Supplier<AggregateEventDelivery<I, A>> eventDeliverySupplier =
memoize(this::createEventDelivery);
/**
* The {@link CommandErrorHandler} tackling the dispatching errors.
*
* <p>This field is not {@code final} only because it is initialized in {@link #onRegistered()}
* method.
*/
private CommandErrorHandler commandErrorHandler;
/** The number of events to store between snapshots. */
private int snapshotTrigger = DEFAULT_SNAPSHOT_TRIGGER;
/** Creates a new instance. */
protected AggregateRepository() {
super();
}
/**
* {@inheritDoc}
*
* <p>{@code AggregateRepository} also registers itself with:
*
* <ul>
* <li>{@link io.spine.server.commandbus.CommandBus CommandBus},
* {@link io.spine.server.event.EventBus EventBus}, and
* {@link io.spine.server.aggregate.ImportBus ImportBus} of
* the parent {@code BoundedContext} for dispatching messages to its aggregates;
* <li>{@link io.spine.server.delivery.Sharding#register(io.spine.server.delivery.Shardable)
* Sharding} for grouping of messages sent to its aggregates.
* </ul>
*/
@Override
public void onRegistered() {
checkNotVoid();
super.onRegistered();
BoundedContext boundedContext = getBoundedContext();
boundedContext.registerCommandDispatcher(this);
boundedContext.registerEventDispatcher(this);
if (aggregateClass().importsEvents()) {
boundedContext.getImportBus()
.register(EventImportDispatcher.of(this));
}
this.commandErrorHandler = boundedContext.createCommandErrorHandler();
registerWithSharding();
}
/**
* Ensures that this repository dispatches at least one kind of messages.
*/
private void checkNotVoid() {
boolean handlesCommands = dispatchesCommands();
boolean reactsOnEvents = dispatchesEvents() || dispatchesExternalEvents();
if (!handlesCommands && !reactsOnEvents) {
throw newIllegalStateException(
"Aggregates of the repository %s neither handle commands" +
" nor react on events.", this);
}
}
@Override
public A create(I id) {
A aggregate = aggregateClass().createEntity(id);
return aggregate;
}
/** Obtains class information of aggregates managed by this repository. */
protected final AggregateClass<A> aggregateClass() {
return (AggregateClass<A>) entityClass();
}
@Override
protected AggregateClass<A> getModelClass(Class<A> cls) {
return asAggregateClass(cls);
}
@Override
public AggregateClass<A> getShardedModelClass() {
return aggregateClass();
}
/**
* Stores the passed aggregate and commits its uncommitted events.
*/
@Override
protected void store(A aggregate) {
Write<I> operation = Write.operationFor(this, aggregate);
operation.perform();
}
/**
* Creates aggregate storage for the repository.
*
* @param factory the factory to create the storage
* @return new storage
*/
@Override
protected Storage<I, ?, ?> createStorage(StorageFactory factory) {
Storage<I, ?, ?> result = factory.createAggregateStorage(getEntityClass());
return result;
}
@Override
public Set<CommandClass> getMessageClasses() {
return aggregateClass().getCommands();
}
/**
* Dispatches the passed command to an aggregate.
*
* <p>The aggregate ID is obtained from the passed command.
*
* <p>The repository loads the aggregate by this ID, or creates a new aggregate
* if there is no aggregate with such ID.
*
* @param envelope the envelope of the command to dispatch
*/
@Override
public I dispatch(CommandEnvelope envelope) {
checkNotNull(envelope);
I target = with(envelope.getTenantId())
.evaluate(() -> doDispatch(envelope));
return target;
}
private I doDispatch(CommandEnvelope envelope) {
I target = route(envelope);
lifecycleOf(target).onDispatchCommand(envelope.getCommand());
dispatchTo(target, envelope);
return target;
}
private I route(CommandEnvelope envelope) {
CommandRouting<I> routing = getCommandRouting();
I target = routing.apply(envelope.getMessage(), envelope.getCommandContext());
onCommandTargetSet(target, envelope.getId());
return target;
}
private void dispatchTo(I id, CommandEnvelope envelope) {
AggregateCommandEndpoint<I, A> endpoint = new AggregateCommandEndpoint<>(this, envelope);
endpoint.dispatchTo(id);
}
/**
* Handles the given error.
*
* <p>If the given error is a rejection, posts the rejection event into
* the {@link EventBus}. Otherwise, logs the error.
*
* @param envelope the command which caused the error
* @param exception the error occurred during processing of the command
*/
@Override
public void onError(CommandEnvelope envelope, RuntimeException exception) {
CaughtError error = commandErrorHandler.handleError(envelope, exception);
error.asRejection()
.map(RejectionEnvelope::getOuterObject)
.ifPresent(event -> postEvents(of(event)));
error.rethrowOnce();
}
@Override
public Set<EventClass> getEventClasses() {
return aggregateClass().getEventClasses();
}
@Override
public Set<EventClass> getExternalEventClasses() {
return aggregateClass().getExternalEventClasses();
}
public Set<EventClass> getImportableEventClasses() {
return aggregateClass().getImportableEventClasses();
}
/**
* Dispatches event to one or more aggregates reacting on the event.
*
* @param envelope the event
* @return identifiers of aggregates that reacted on the event
*/
@Override
public Set<I> dispatchEvent(EventEnvelope envelope) {
checkNotNull(envelope);
Set<I> targets = with(envelope.getTenantId())
.evaluate(() -> doDispatch(envelope));
return targets;
}
private Set<I> doDispatch(EventEnvelope envelope) {
Set<I> targets = route(envelope);
targets.forEach(id -> dispatchTo(id, envelope));
return targets;
}
private Set<I> route(EventEnvelope envelope) {
EventRouting<I> routing = getEventRouting();
Set<I> targets = routing.apply(envelope.getMessage(), envelope.getEventContext());
return targets;
}
private void dispatchTo(I id, EventEnvelope envelope) {
AggregateEventEndpoint<I, A> endpoint =
new AggregateEventReactionEndpoint<>(this, envelope);
endpoint.dispatchTo(id);
}
boolean importsEvent(EventClass eventClass) {
boolean result = aggregateClass().getImportableEventClasses()
.contains(eventClass);
return result;
}
/**
* Imports the passed event into one of the aggregates.
*/
I importEvent(EventEnvelope envelope) {
checkNotNull(envelope);
I target = routeImport(envelope);
EventImportEndpoint<I, A> endpoint = new EventImportEndpoint<>(this, envelope);
endpoint.dispatchTo(target);
return target;
}
private I routeImport(EventEnvelope envelope) {
Set<I> ids = getEventImportRouting().apply(envelope.getMessage(),
envelope.getEventContext());
int numberOfTargets = ids.size();
checkState(
numberOfTargets > 0,
"Could not get aggregate ID from the event context: `%s`. Event class: `%s`.",
envelope.getEventContext(),
envelope.getMessageClass()
);
checkState(
numberOfTargets == 1,
"Expected one aggregate ID, but got %s (`%s`). Event class: `%s`, context: `%s`.",
String.valueOf(numberOfTargets),
ids,
envelope.getMessageClass(),
envelope.getEventContext()
);
I id = ids.stream()
.findFirst()
.get();
onImportTargetSet(id, envelope.getId());
return id;
}
@Override
public void onError(EventEnvelope envelope, RuntimeException exception) {
checkNotNull(envelope);
checkNotNull(exception);
logError("Error reacting on event (class: %s id: %s) in aggregate of type %s.",
envelope, exception);
}
/**
* Obtains command routing instance used by this repository.
*/
protected final CommandRouting<I> getCommandRouting() {
return commandRouting;
}
/**
 * Obtains event routing instance used by this repository.
 *
 * @return the event routing schema
 */
protected final EventRouting<I> getEventRouting() {
    return eventRouting;
}
/**
 * Obtains the event import routing, which by default uses
 * {@linkplain io.spine.core.EventContext#getProducerId() producer ID} of the event
 * as the target aggregate ID.
 *
 * <p>This default routing requires that {@link Event Event} instances
 * {@linkplain ImportBus#post(com.google.protobuf.Message, io.grpc.stub.StreamObserver) posted}
 * for import must {@link io.spine.core.EventContext#getProducerId() contain} the ID of the
 * target aggregate. Not providing a valid aggregate ID would result in
 * {@code RuntimeException}.
 *
 * <p>Some aggregates may produce events with the aggregate ID as the first field of an event
 * message. To set the default routing for repositories of such aggregates, please use the
 * code below:
 *
 * <pre>{@code
 * getEventImportRouting().replaceDefault(EventRoute.fromFirstMessageField());
 * }</pre>
 *
 * Consider adding this code to the constructor of your {@code AggregateRepository} class.
 *
 * @return the event import routing schema
 */
protected final EventRouting<I> getEventImportRouting() {
    return eventImportRoute;
}
/**
 * Posts passed events to {@link EventBus}.
 */
void postEvents(Collection<Event> events) {
    // The repository event filter may drop events which should not be posted.
    Iterable<Event> filtered = eventFilter().filter(events);
    getBoundedContext().getEventBus()
                       .post(filtered);
}
/** Posts the state of the given aggregate to the {@code Stand} under the passed tenant. */
private void updateStand(TenantId tenantId, A aggregate) {
    getStand().post(tenantId, aggregate);
}
/**
 * Returns the number of events until a next {@code Snapshot} is made.
 *
 * @return a positive integer value
 * @see #DEFAULT_SNAPSHOT_TRIGGER
 */
protected int getSnapshotTrigger() {
    return this.snapshotTrigger;
}
/**
 * Changes the number of events between making aggregate snapshots to the passed value.
 *
 * <p>The default value is defined in {@link #DEFAULT_SNAPSHOT_TRIGGER}.
 *
 * @param snapshotTrigger a positive number of the snapshot trigger
 * @throws IllegalArgumentException if the passed value is not positive
 */
protected void setSnapshotTrigger(int snapshotTrigger) {
    // Provide a diagnostic message instead of the previous bare check,
    // so an invalid call is easy to trace.
    checkArgument(snapshotTrigger > 0,
                  "The snapshot trigger must be a positive number, but was: %s.",
                  snapshotTrigger);
    this.snapshotTrigger = snapshotTrigger;
}
/** Obtains the storage of this repository cast to {@code AggregateStorage}. */
protected AggregateStorage<I> aggregateStorage() {
    @SuppressWarnings("unchecked") // We check the type on initialization.
    AggregateStorage<I> result = (AggregateStorage<I>) getStorage();
    return result;
}
/**
 * Loads or creates an aggregate by the passed ID.
 *
 * @param id the ID of the aggregate
 * @return loaded or created aggregate instance
 */
A loadOrCreate(I id) {
    // Fall back to a freshly created instance when the storage has no history.
    return load(id).orElseGet(() -> create(id));
}
/**
 * Loads an aggregate by the passed ID.
 *
 * <p>This method defines the basic flow of an {@code Aggregate} loading. First,
 * the {@linkplain AggregateStateRecord Aggregate history} is
 * {@linkplain #fetchHistory fetched} from the storage. Then the {@code Aggregate} is
 * {@linkplain #play restored} from its state history.
 *
 * @param id the ID of the aggregate
 * @return the loaded instance or {@code Optional.empty()} if there is no {@code Aggregate}
 *         with the ID
 */
private Optional<A> load(I id) {
    Optional<AggregateStateRecord> history = fetchHistory(id);
    if (!history.isPresent()) {
        // No stored record yet — notify the lifecycle that the entity is being created.
        lifecycleOf(id).onEntityCreated(AGGREGATE);
        return Optional.empty();
    }
    A aggregate = play(id, history.get());
    return Optional.of(aggregate);
}
/**
 * Fetches the history of the {@code Aggregate} with the given ID.
 *
 * <p>To read an {@link AggregateStateRecord} from an {@link AggregateStorage},
 * a {@linkplain #getSnapshotTrigger() snapshot trigger} is used as a
 * {@linkplain AggregateReadRequest#getBatchSize() batch size}.
 *
 * @param id the ID of the {@code Aggregate} to fetch
 * @return the {@link AggregateStateRecord} for the {@code Aggregate} or
 *         {@code Optional.empty()} if there is no record with the ID
 */
protected Optional<AggregateStateRecord> fetchHistory(I id) {
    AggregateReadRequest<I> request = new AggregateReadRequest<>(id, snapshotTrigger);
    return aggregateStorage().read(request);
}
/**
 * Plays the given {@linkplain AggregateStateRecord Aggregate history} for an instance
 * of {@link Aggregate} with the given ID.
 *
 * @param id the ID of the {@code Aggregate} to load
 * @param history the state record of the {@code Aggregate} to load
 * @return an instance of {@link Aggregate}
 */
protected A play(I id, AggregateStateRecord history) {
    A aggregate = create(id);
    // State changes are applied under a transaction and become effective on commit.
    AggregateTransaction tx = AggregateTransaction.start(aggregate);
    aggregate.play(history);
    tx.commit();
    return aggregate;
}
/**
 * Invoked by an endpoint after a message was dispatched to the aggregate.
 *
 * <p>Stores the updated aggregate and then posts its state to the {@code Stand}.
 *
 * @param tenantId the tenant associated with the processed message
 * @param aggregate the updated aggregate
 */
void onModifiedAggregate(TenantId tenantId, A aggregate) {
    store(aggregate);
    updateStand(tenantId, aggregate);
}
/**
 * Loads an aggregate by the passed ID.
 *
 * <p>An aggregate will be loaded even if
 * {@link io.spine.server.entity.EntityWithLifecycle#isArchived() archived}
 * or {@link io.spine.server.entity.EntityWithLifecycle#isDeleted() deleted} lifecycle
 * attribute, or both of them, are set to {@code true}.
 *
 * @param id the ID of the aggregate to load
 * @return the aggregate instance, or {@link Optional#empty() empty()} if there is no
 *         aggregate with such ID
 */
@Override
public Optional<A> find(I id) throws IllegalStateException {
    return load(id);
}
/** The Stand instance for sending updated aggregate states. */
private Stand getStand() {
    return getBoundedContext().getStand();
}
/**
 * Defines a strategy of event delivery applied to the instances managed by this repository.
 *
 * <p>By default uses direct delivery.
 *
 * <p>Descendants may override this method to redefine the strategy. In particular,
 * it is possible to postpone dispatching of a certain event to a particular aggregate
 * instance at runtime.
 *
 * @return delivery strategy for events applied to the instances managed by this repository
 */
@SPI
protected AggregateDelivery<I, A, EventEnvelope, ?, ?> getEventEndpointDelivery() {
    return eventDeliverySupplier.get();
}
/**
 * Defines a strategy of command delivery applied to the instances managed by this repository.
 *
 * <p>By default uses direct delivery.
 *
 * <p>Descendants may override this method to redefine the strategy. In particular,
 * it is possible to postpone dispatching of a certain command to a particular aggregate
 * instance at runtime.
 *
 * @return delivery strategy for commands applied to the instances managed by this repository
 */
@SPI
protected AggregateDelivery<I, A, CommandEnvelope, ?, ?> getCommandEndpointDelivery() {
    return commandDeliverySupplier.get();
}
/**
 * {@inheritDoc}
 *
 * <p>Overridden to expose the method into current package.
 */
@Override
protected EntityLifecycle lifecycleOf(I id) {
    return super.lifecycleOf(id);
}
/** Notifies the lifecycle of the aggregate that an event is dispatched to its reactor. */
void onDispatchEvent(I id, Event event) {
    lifecycleOf(id).onDispatchEventToReactor(event);
}
/** Notifies the lifecycle of the aggregate that it became the target of the command. */
private void onCommandTargetSet(I id, CommandId commandId) {
    lifecycleOf(id).onTargetAssignedToCommand(commandId);
}
/** Notifies the lifecycle of the aggregate that it became the target of the imported event. */
private void onImportTargetSet(I id, EventId eventId) {
    lifecycleOf(id).onImportTargetSet(eventId);
}
/** Notifies the lifecycle of the aggregate that the event was imported into it. */
void onEventImported(I id, Event event) {
    lifecycleOf(id).onEventImported(event);
}
/** Creates the default (direct) event delivery for this repository. */
private AggregateEventDelivery<I, A> createEventDelivery() {
    return new AggregateEventDelivery<>(this);
}
/** Creates the default (direct) command delivery for this repository. */
private AggregateCommandDelivery<I, A> createCommandDelivery() {
    return new AggregateCommandDelivery<>(this);
}
/** Uses a single shard for all messages dispatched by this repository. */
@Override
public ShardingStrategy getShardingStrategy() {
    return UniformAcrossTargets.singleShard();
}
/** Exposes the command- and event-delivery consumers to the sharding infrastructure. */
@Override
public Iterable<ShardedStreamConsumer<?, ?>> getMessageConsumers() {
    return of(getCommandEndpointDelivery().getConsumer(),
              getEventEndpointDelivery().getConsumer());
}
/** Obtains the name of the bounded context to which this repository belongs. */
@Override
public BoundedContextName getBoundedContextName() {
    return getBoundedContext().getName();
}
/**
 * Closes the repository.
 *
 * <p>Unregisters from sharding first, then performs the standard close routine.
 */
@Override
public void close() {
    unregisterWithSharding();
    super.close();
}
}
|
package io.spine.server.entity;
import com.google.protobuf.Message;
import io.spine.annotation.Internal;
import io.spine.base.Identifier;
import io.spine.core.Version;
import io.spine.server.entity.Repository.Lifecycle;
import io.spine.validate.ValidatingBuilder;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import java.util.List;
import java.util.Set;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableSet.copyOf;
import static com.google.common.collect.Lists.newLinkedList;
/**
 * A {@link TransactionListener} which reports entity state changes to the
 * {@link Repository.Lifecycle} after a successful transaction commit.
 *
 * <p>An instance is bound to a single entity: reusing it for another entity's
 * transaction results in an {@code IllegalStateException}.
 */
@Internal
public final class EntityLifecycleMonitor<I,
                                          E extends TransactionalEntity<I, S, B>,
                                          S extends Message,
                                          B extends ValidatingBuilder<S, ? extends Message.Builder>>
        implements TransactionListener<I, E, S, B> {

    /** The repository owning the monitored entity. */
    private final Repository<I, ?> repository;

    /** IDs of event messages applied within the transaction, in application order. */
    private final List<Message> acknowledgedMessageIds;

    /** The ID of the entity under transaction; memoized on the first observed phase. */
    private @MonotonicNonNull I entityId;

    private EntityLifecycleMonitor(Repository<I, ?> repository) {
        this.repository = repository;
        this.acknowledgedMessageIds = newLinkedList();
    }

    /**
     * Creates a new instance of {@code EntityLifecycleMonitor}.
     *
     * @param repository the repository of the entity under transaction
     */
    public static
    <I,
     E extends TransactionalEntity<I, S, B>,
     S extends Message,
     B extends ValidatingBuilder<S, ? extends Message.Builder>>
    TransactionListener<I, E, S, B> newInstance(Repository<I, ?> repository) {
        checkNotNull(repository);
        return new EntityLifecycleMonitor<>(repository);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Memoizes the ID of the event applied by the given phase. The received event IDs will be
     * reported to the {@link Repository.Lifecycle} after a successful commit.
     */
    @Override
    public void onAfterPhase(Transaction.Phase<I, E, S, B> phase) {
        checkSameEntity(phase.getUnderlyingTransaction()
                             .getEntity());
        Message messageId = phase.eventId();
        acknowledgedMessageIds.add(messageId);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Does nothing.
     */
    @Override
    public void onBeforeCommit(E entity, S state, Version version, LifecycleFlags lifecycleFlags) {
        // NOP.
    }

    /**
     * {@inheritDoc}
     *
     * <p>Notifies the {@link Lifecycle} of the entity state change.
     */
    @Override
    public void onAfterCommit(EntityRecordChange change) {
        Set<Message> messageIds = copyOf(acknowledgedMessageIds);
        I id = Identifier.unpack(change.getPreviousValue()
                                       .getEntityId());
        Lifecycle lifecycle = repository.lifecycleOf(id);
        lifecycle.onStateChanged(change, messageIds);
    }

    /**
     * {@inheritDoc}
     *
     * <p>Does nothing: the failure is ignored by this monitor.
     */
    @Override
    public void onTransactionFailed(Throwable t, E entity, S state, Version version,
                                    LifecycleFlags lifecycleFlags) {
        // NOP.
    }

    /**
     * Ensures that this monitor is used for transactions of a single entity.
     *
     * <p>Memoizes the entity ID on the first call and compares the IDs of
     * subsequently passed entities to it.
     *
     * @throws IllegalStateException if the passed entity differs from the one seen before
     */
    private void checkSameEntity(E entity) throws IllegalStateException {
        I idToCheck = entity.getId();
        if (entityId == null) {
            entityId = idToCheck;
        } else {
            checkState(entityId.equals(idToCheck),
                       "Tried to reuse an instance of %s for multiple transactions.",
                       EntityLifecycleMonitor.class.getSimpleName());
        }
    }
}
|
package functional.json;
import com.fasterxml.jackson.databind.JsonNode;
import functional.ApplicationTests;
import uk.gov.openregister.JsonObjectMapper;
import org.json.JSONException;
import org.junit.Test;
import org.skyscreamer.jsonassert.JSONAssert;
import play.libs.Json;
import play.libs.ws.WSResponse;
import uk.gov.openregister.domain.Record;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.fest.assertions.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static play.test.Helpers.ACCEPTED;
import static play.test.Helpers.BAD_REQUEST;
/**
 * Functional tests for creating and superseding records through the JSON API.
 */
@SuppressWarnings("unchecked")
public class CreateRecordTest extends ApplicationTests {

    @Test
    public void testCreateARecordReturns202() {
        // Fixed: the payload previously contained "name":"entryName" twice;
        // the duplicate JSON key was redundant and has been removed.
        String json = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"key1\": \"value1\",\"key2\": \"value2\"}";
        WSResponse response = postJson("/create", json);
        assertThat(response.getStatus()).isEqualTo(ACCEPTED);
    }

    @Test
    public void testCreateARecordWithMalformedRequestReturns400() {
        String json = "this is not json";
        WSResponse response = postJson("/create", json);
        assertThat(response.getStatus()).isEqualTo(BAD_REQUEST);
    }

    @Test
    public void testCreateARecordWithInvalidKeysReturns400() throws JSONException {
        String json = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"invalidKey\": \"value1\",\"key1\": \"value1\",\"key2\": \"value2\"}";
        WSResponse response = postJson("/create", json);
        assertThat(response.getStatus()).isEqualTo(400);
        assertThat(response.getBody()).contains("Key not required");
    }

    @Test
    public void testCreateARecordWithInvalidAndMissingKeysReturns400() {
        String json = "{\"name\":\"entryName\",\"invalidKey\": \"value1\",\"key2\": \"value2\"}";
        WSResponse response = postJson("/create", json);
        assertThat(response.getStatus()).isEqualTo(400);
    }

    @Test
    public void testCreateARecordStoresItToTheDatabase() {
        String json = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"key1\":\"value1\",\"key2\":\"value2\"}";
        WSResponse response = postJson("/create", json);
        assertThat(response.getStatus()).isEqualTo(ACCEPTED);
        WSResponse wsResponse = getByKV("name", "entryName", "json");
        String body = wsResponse.getBody();
        JsonNode receivedEntry = Json.parse(body).get("entry");
        // NOTE(review): asText() on an object node yields "", so both sides of
        // this assertion are empty strings — consider comparing the JsonNode
        // trees instead to make the check meaningful.
        assertThat(receivedEntry.asText()).isEqualTo(Json.parse(json).asText());
    }

    @Test
    public void createANewRecordWithDuplicatePrimaryKeyDataReturns400() {
        String json = "{\"test-register\":\"testre'gisterkey\",\"name\":\"entryName\",\"key1\":\"value1\",\"key2\":\"value2\"}";
        WSResponse response = postJson("/create", json);
        assertThat(response.getStatus()).isEqualTo(ACCEPTED);
        // The same primary key with different values must be rejected.
        response = postJson("/create", json.replaceAll("value1", "newValue"));
        assertThat(response.getStatus()).isEqualTo(BAD_REQUEST);
    }

    @Test
    public void updatingARecordUpdatesTheEntryInRegister() {
        String json = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"key1\":\"value1\",\"key2\":\"value2\"}";
        String hash = new Record(json).getHash();
        postJson("/create", json);
        String updatedJson = json.replaceAll("value1", "newValue");
        WSResponse response = postJson("/supersede/" + hash, updatedJson);
        assertThat(response.getStatus()).isEqualTo(ACCEPTED);
        WSResponse wsResponse = getByKV("key2", "value2", "json");
        String body = wsResponse.getBody();
        JsonNode receivedEntry = Json.parse(body).get("entry");
        // NOTE(review): see testCreateARecordStoresItToTheDatabase — this
        // asText() comparison is vacuous for object nodes.
        assertThat(receivedEntry.asText()).isEqualTo(Json.parse(updatedJson).asText());
    }

    @Test
    public void updateARecordValidatesTheJson() {
        String json = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"key1\": \"value1\",\"key2\": \"value2\"}";
        Record record = new Record(json);
        postJson("/create", json);
        String updatedJson = "{\"test-register\":\"\",\"name\":\"entryName\"}";
        WSResponse response = postJson("/supersede/" + record.getHash(), updatedJson);
        assertThat(response.getBody())
                .contains("Missing required key");
    }

    @Test
    public void updateARecordReturns400Json_whenThereIsNoRecordWithTheGivenHash() {
        String updatedJson = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"key1\": \"value1\",\"key2\": \"value2\"}";
        WSResponse response = postJson("/supersede/nonExistingHash", updatedJson);
        assertThat(response.getStatus()).isEqualTo(400);
    }

    @Test
    public void updateARecordReturns400Json_whenTryingToUpdatePrimaryKeyColumn() {
        String json = "{\"test-register\":\"testregisterkey\",\"name\":\"entryName\",\"key1\": \"value1\",\"key2\": \"value2\"}";
        Record record = new Record(json);
        assertEquals(202, postJson("/create", json).getStatus());
        String updatedJson = "{\"test-register\":\"new'PrimaryKey\",\"name\":\"entryName\",\"key1\": \"value1\",\"key2\": \"value2\"}";
        WSResponse response = postJson("/supersede/" + record.getHash(), updatedJson);
        assertThat(response.getStatus()).isEqualTo(400);
    }
}
|
package org.sfm.datastax;
import com.datastax.driver.core.*;
import com.datastax.driver.core.querybuilder.Batch;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import org.junit.Test;
import org.sfm.beans.DbObject;
import org.sfm.map.Mapper;
import org.sfm.reflect.TypeReference;
import java.lang.reflect.Type;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.LockSupport;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
/**
 * Tests binding {@code DbObject} instances to Cassandra statements through
 * {@code DatastaxBinder}, both for single and batched statements.
 */
public class SettableDataMapperTest extends AbstractDatastaxTest {

    /** Parameterized insert covering every column of the {@code dbobjects} table. */
    public static final String QUERY = "insert into " +
            "dbobjects(id, name, email, creation_time, type_ordinal, type_name) " +
            "values(?, ?, ?, ?, ?, ?)";

    List<DbObject> dbObjects = Arrays.asList(DbObject.newInstance(), DbObject.newInstance());

    @Test
    public void testInsertDbObjects() throws Exception {
        testInSession(new Callback() {
            @Override
            public void call(Session session) throws Exception {
                PreparedStatement preparedStatement = session.prepare(QUERY);
                DatastaxBinder<DbObject> datastaxBinder = DatastaxMapperFactory.newInstance().mapFrom(DbObject.class);
                session.execute(datastaxBinder.mapTo(dbObjects.get(0), preparedStatement));
                checkObjectInserted(session, 0);
            }
        });
    }

    /** Reads the i-th object back by ID and compares it with the in-memory original. */
    protected void checkObjectInserted(Session session, int i) {
        DbObject object = dbObjects.get(i);
        DatastaxMapper<DbObject> dbObjectDatastaxMapper = DatastaxMapperFactory.newInstance().mapTo(DbObject.class);
        BoundStatement boundStatement = session.prepare("select * from dbobjects where id = ?").bind(object.getId());
        ResultSet execute = session.execute(boundStatement);
        DbObject actual = dbObjectDatastaxMapper.iterator(execute).next();
        assertEquals(object, actual);
    }

    @Test
    public void testInsertDbObjectsBatch() throws Exception {
        testInSession(new Callback() {
            @Override
            public void call(Session session) throws Exception {
                Batch bs = QueryBuilder.batch();
                bs.add(new SimpleStatement(QUERY));
                bs.add(new SimpleStatement(QUERY));
                PreparedStatement preparedStatement = session.prepare(bs);
                DatastaxBinder<List<DbObject>> datastaxBinder = DatastaxMapperFactory.newInstance().disableAsm(true).mapFrom(new TypeReference<List<DbObject>>() {
                });
                Statement statement = datastaxBinder.mapTo(dbObjects, preparedStatement);
                statement.enableTracing();
                // BUG FIX: the bound batch statement was built but never executed,
                // so the checks below could not have observed the inserted rows.
                session.execute(statement);
                checkObjectInserted(session, 0);
                checkObjectInserted(session, 1);
            }
        });
    }

    @Test
    public void testUpdateDbObjectsBatch() throws Exception {
        testInSession(new Callback() {
            @Override
            public void call(Session session) throws Exception {
                PreparedStatement preparedStatement = session.prepare(QUERY);
                DatastaxBinder<DbObject> datastaxBinder = DatastaxMapperFactory.newInstance().mapFrom(DbObject.class);
                DbObject value = dbObjects.get(0);
                session.execute(datastaxBinder.mapTo(value, preparedStatement));
                checkObjectInserted(session, 0);
                PreparedStatement updateStatement = session.prepare("UPDATE dbobjects SET name = ? WHERE id = ?");
                value.setName("newname");
                session.execute(datastaxBinder.mapTo(value, updateStatement));
                checkObjectInserted(session, 0);
            }
        });
    }
}
|
package com.facebook.redextest;
import static org.fest.assertions.api.Assertions.assertThat;
import java.io.*;
import java.util.*;
import org.junit.Test;
import com.facebook.proguard.annotations.DoNotStrip;
import com.facebook.redextest.InstrumentBasicBlockAnalysis;
@DoNotStrip
public class InstrumentBasicBlockTarget {
// Information taken from source dictionary and metadata
// Index: 0
// Offset within Stats Array: 8
// Single basic block: unconditionally returns the constant 42.
// NOTE: all testFunc* bodies below are basic-block instrumentation targets;
// their exact control-flow shape is intentional and must not be refactored.
@DoNotStrip
public static int testFunc01(int foo) {
  return 42;
}
// Index: 1
// Offset within Stats Array: 10
// Bit-Vector 0: [2,1]
// Two-way branch: divides when 'a' is positive, multiplies otherwise.
@DoNotStrip
public static int testFunc02(int a, int b) {
  if (a > 0) {
    return a / b;
  } else {
    return a * b;
  }
}
// Index: 2
// Offset within Stats Array: 13
// Bit-Vector 0: [2,1]
// Conditional increment with a shared fall-through return.
@DoNotStrip
public static int testFunc03(int a, int b) {
  if (a > 0) {
    a++;
  }
  return a;
}
// Index: 3
// Offset within Stats Array: 16
// Bit-Vector 0: [5,4,2,1]
// Always triggers ArrayIndexOutOfBoundsException (index 10 of a 10-element
// array) and rethrows it; the trailing return is unreachable in practice.
@DoNotStrip
@SuppressWarnings({"EmptyCatchBlock", "CatchGeneralException"})
public static int testFunc04(int a, int b) {
  try {
    int[] arr = new int[10];
    arr[10] = a;
  } catch (Exception e) {
    throw e;
  }
  return (b % 2);
}
// Index: 4
// Offset within Stats Array: 19
// Bit-Vector 0: [4,3,5,2,1]
// Nested branching with an early return inside the inner 'else' path.
@DoNotStrip
public static int testFunc05(int flag, int temp_var) {
  int z = 0;
  if (flag != 0) {
    System.out.println("It's True!!");
    z += 1;
    if (temp_var > 4) {
      System.out.println("Greater than 4");
      z += 1;
    } else {
      System.out.println("Couldnt make it to 4!, early return");
      z -= 1;
      return z;
    }
  } else {
    System.out.println("Not True :(");
    z -= 1;
  }
  System.out.println("After Test: " + temp_var);
  return z;
}
// Index: 5
// Offset within Stats Array: 22
// Bit-Vector 0: [4,3,2,1]
// Returns a / b, or the constant 13 when the division throws.
@DoNotStrip
public static int testFunc06(int a, int b) {
  try {
    return a / b;
  } catch (ArithmeticException e) {
    System.out.println("ArithmeticException");
    return 13;
  }
}
// Index: 6
// Offset within Stats Array: 25
// Bit-Vector 0: [9,8,7,6,4,3,2,1]
// Branches on parity of 'a': divides a/b or b/a, each inside its own
// try/catch that wraps and rethrows IllegalArgumentException.
@DoNotStrip
public static int testFunc07(int a, int b) {
  if (a % 2 == 0) {
    try {
      return a / b;
    } catch (IllegalArgumentException e) {
      throw new IllegalArgumentException("Exception: " + a + ": " + e.getMessage());
    }
  } else {
    try {
      return b / a;
    } catch (IllegalArgumentException e) {
      throw new IllegalArgumentException("Exception: " + b + ": " + e.getMessage());
    }
  }
}
// Index: 7
// Offset within Stats Array: 28
// Bit-Vector 0: [9,10,12,11,8,7,6,5,4,3,2,1]
// Exercises multiple catch blocks: array bounds, arithmetic, and a general
// catch-all that rethrows; the even-index path scales the index out of range.
@DoNotStrip
@SuppressWarnings("CatchGeneralException")
public static int testFunc08(int size, int index, int num) {
  try {
    int[] arr = new int[size];
    arr[index] = num / index;
    if (index % 2 == 0) {
      index = index * 2020;
      return arr[index];
    }
  } catch (ArrayIndexOutOfBoundsException e) {
    System.out.println("ArrayIndexOutOfBoundsException");
    return 7;
  } catch (ArithmeticException e) {
    System.out.println("ArithmeticException");
    return 13;
  } catch (Exception e) {
    System.out.println("Exception");
    throw e;
  }
  return 9;
}
// Index: 8
// Offset within Stats Array: 31
// Bit-Vector 0: [43,44,45,46,47,48,49,50,51,52,53,54,55,3,2,1]
// Bit-Vector 1: [27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42]
// Bit-Vector 2: [11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26]
// Bit-Vector 3: [4,5,6,7,8,9,10]
// Maps |test_var| in 1..52 to a fixed string via a large switch; each case is
// its own basic block, which is why this function needs four bit-vectors.
@DoNotStrip
public static String testFunc09(int test_var) {
  if (test_var < 0) {
    test_var = test_var * -1;
  }
  switch (test_var) {
    case 1:
      return "apple";
    case 2:
      return "banana";
    case 3:
      return "cat";
    case 4:
      return "dog";
    case 5:
      return "eat";
    case 6:
      return "fat";
    case 7:
      return "go";
    case 8:
      return "hi";
    case 9:
      return "ice";
    case 10:
      return "juice";
    case 11:
      return "kim";
    case 12:
      return "love";
    case 13:
      return "mmm";
    case 14:
      return "ninja";
    case 15:
      return "orange";
    case 16:
      return "purge";
    case 17:
      return "qwerty";
    case 18:
      return "roll";
    case 19:
      return "star";
    case 20:
      return "tar";
    case 21:
      return "ufo";
    case 22:
      return "void";
    case 23:
      return "woman";
    case 24:
      return "x";
    case 25:
      return "yellow";
    case 26:
      return "zz";
    case 27:
      return "aapple";
    case 28:
      return "bbanana";
    case 29:
      return "ccat";
    case 30:
      return "ddog";
    case 31:
      return "eeat";
    case 32:
      return "ffat";
    case 33:
      return "ggo";
    case 34:
      return "hhi";
    case 35:
      return "iice";
    case 36:
      return "jjuice";
    case 37:
      return "kkim";
    case 38:
      return "llove";
    case 39:
      return "mmmm";
    case 40:
      return "nninja";
    case 41:
      return "oorange";
    case 42:
      return "ppurge";
    case 43:
      return "qqwerty";
    case 44:
      return "rroll";
    case 45:
      return "sstar";
    case 46:
      return "ttar";
    case 47:
      return "uufo";
    case 48:
      return "vvoid";
    case 49:
      return "wwoman";
    case 50:
      return "xx";
    case 51:
      return "yyellow";
    case 52:
      return "zzz";
  }
  return "...";
}
// Index: 9
// Offset within Stats Array: 37
// Bit-Vector 0: [0,1,3,4]
// Returns whether the class com.facebook.Foo can be loaded by reflection.
@DoNotStrip
public static boolean testFunc10() {
  try {
    Class.forName("com.facebook.Foo");
    return true;
  } catch (ClassNotFoundException e) {
    System.out.println("can't load it");
    return false;
  }
}
@DoNotStrip
// Index: 10
// Offset: 40
// Vector 0: [5,4,3,2,1]
// for-loop accumulation: sums 1..size-1, multiplying the running sum by 10
// at each multiple of ten.
public static int testFunc11(int size) {
  int i;
  int sum = 0;
  for (i = 1; i < size; i++) {
    if (i % 10 == 0) {
      sum *= 10;
    }
    sum += i;
  }
  return sum;
}
@DoNotStrip
// Index: 11
// Offset: 43
// Vector 0: [5,4,3,2,1]
// Linear search (for-loop): returns the first index of 'value', or -1.
public static int testFunc12(int[] array, int value) {
  int i;
  for (i = 0; i < array.length; i++)
  {
    if (array[i] == value) {
      return i;
    }
  }
  return -1;
}
@DoNotStrip
// Index: 12
// Offset: 46
// Vector 0: [10,12,11,9,7,6,5,4,3,2]
// for-loop over the array writing random remainders; rethrows the
// ArithmeticException raised when an element is zero.
public static int testFunc13(int[] array) {
  Random rand = new Random();
  int i;
  try {
    for(i = 0; i < array.length; i++) {
      array[i] = rand.nextInt() % array[i];
    }
  } catch (ArithmeticException e) {
    System.out.println("Just used left over sum");
    throw e;
  }
  return 7;
}
@DoNotStrip
// Index: 13
// Offset: 49
// Vector 0: [8,9,7,6,5,4,3,2]
// Indexed sum (for-loop): returns the partial sum accumulated so far when an
// element of 'array' indexes outside 'array2'.
public static int testFunc14(int[] array, int[] array2) {
  int i;
  int sum = 0;
  try {
    for(i = 0; i < array.length; i++) {
      sum += array2[array[i]];
    }
  } catch (ArrayIndexOutOfBoundsException e) {
    System.out.println("Just used left over sum");
    return sum;
  }
  return sum;
}
@DoNotStrip
// Index: 14
// Offset: 52
// Vector 0: [5,4,3,2,1]
// while-loop variant of testFunc11: same accumulation, different loop shape.
public static int testFunc15(int size) {
  int i = 1;
  int sum = 0;
  while (i < size) {
    if (i % 10 == 0) {
      sum *= 10;
    }
    sum += i;
    i++;
  }
  return sum;
}
@DoNotStrip
// Index: 15
// Offset: 55
// Vector 0: [5,4,3,2,1]
// while-loop variant of testFunc12: linear search returning index or -1.
public static int testFunc16(int[] array, int value) {
  int i = 0;
  while (i < array.length) {
    if (array[i] == value) {
      return i;
    }
    i++;
  }
  return -1;
}
@DoNotStrip
// Index: 16
// Offset: 58
// Vector 0: [10,12,11,9,7,6,5,4,3,2]
// while-loop variant of testFunc13: random remainders with rethrow on
// division by a zero element.
public static int testFunc17(int[] array) {
  Random rand = new Random();
  int i = 0;
  try {
    while(i < array.length) {
      array[i] = rand.nextInt() % array[i];
      i++;
    }
  } catch (ArithmeticException e) {
    System.out.println("Just used left over sum");
    throw e;
  }
  return 7;
}
@DoNotStrip
// Index: 17
// Offset: 61
// Vector 0: [8,9,7,6,5,4,3,2]
// while-loop variant of testFunc14: indexed sum with partial-sum fallback on
// out-of-bounds access.
public static int testFunc18(int[] array, int[] array2) {
  int i = 0;
  int sum = 0;
  try {
    while(i < array.length) {
      sum += array2[array[i]];
      i++;
    }
  } catch (ArrayIndexOutOfBoundsException e) {
    System.out.println("Just used left over sum");
    return sum;
  }
  return sum;
}
// Traces testFunc01 and verifies its hit counter at stats[9].
@Test
@DoNotStrip
public void test01() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc01(0)).isEqualTo(42);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[9]).isEqualTo((short)1);
  // TestFunc01 has no bitvector because it only has one basicblock
  // that always executes so we do not need to assert it
}
// Traces testFunc02 and checks its hit counter and bit-vector.
@Test
@DoNotStrip
public void test02() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc02(21,7)).isEqualTo(3);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[11]).isEqualTo((short)1);
  // Assert that TestFunc02 excuted only BasicBlocks (1) skipping
  // 2 which is [0,1] in the Bit-Vector form due to return in the if condition
  assertThat(stats[12]).isEqualTo((short)0b01);
}
// Traces testFunc03: both basic blocks execute for a positive argument.
@Test
@DoNotStrip
public void test03() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc03(8,9)).isEqualTo(9);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[14]).isEqualTo((short)1);
  // Assert that TestFunc03 excuted all BasicBlocks
  // which is [1,1] in the Bit-Vector form
  assertThat(stats[15]).isEqualTo((short)0b11);
}
// Traces testFunc04: the rethrown exception must propagate to the caller.
@Test
@DoNotStrip
@SuppressWarnings("CatchGeneralException")
public void test04() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  boolean thrown = false;
  try {
    testFunc04(10,16);
  } catch (Exception e) {
    System.out.println("Exeception Thrown");
    thrown = true;
  }
  assertThat(thrown).isTrue();
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[17]).isEqualTo((short)1);
  // Assert that TestFunc04 excuted only BasicBlocks (1,2,5) skipping
  // 3 which is [1,0,1,1] in the Bit-Vector form due to ArrayOutOfBounds exception
  assertThat(stats[18]).isEqualTo((short)0b1011);
}
// Traces testFunc05 through the 'flag == 0' path.
@Test
@DoNotStrip
public void test05() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc05(0,1)).isEqualTo(-1);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[20]).isEqualTo((short)1);
  // Assert that TestFunc05 excuted only BasicBlocks (4,5) skipping
  // 1,2,3 which is [1,0,1,0,0] in the Bit-Vector form due to flag being zero
  assertThat(stats[21]).isEqualTo((short)0b10100);
}
// Traces testFunc06 through the non-throwing division path.
@Test
@DoNotStrip
public void test06() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc06(9,1)).isEqualTo(9);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[23]).isEqualTo((short)1);
  // Assert that TestFunc05 excuted only BasicBlocks (1,2,3) skipping
  // 4 which is [0,1,1,1] in the Bit-Vector form due to early return in exception
  assertThat(stats[24]).isEqualTo((short)0b0111);
}
// Traces testFunc07 through the even-'a' division path.
@Test
@DoNotStrip
public void test07() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc07(8,2)).isEqualTo(4);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[26]).isEqualTo((short)1);
  // Assert that TestFunc07 excuted only BasicBlocks (1,2,3) skipping
  // 4,6,7,8,9 which is [0,0,0,0,0,1,1,1] in the Bit-Vector form due 8 % 2 = 0 and
  // there was no exception handling needed so it returned early
  assertThat(stats[27]).isEqualTo((short)0b0111);
}
// Traces testFunc08 through the out-of-bounds path which returns 7.
@Test
@DoNotStrip
@SuppressWarnings("CatchGeneralException")
public void test08() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  int value = 0;
  try {
    value = testFunc08(5,2,8);
  } catch (Exception e) {
    System.out.println("Exeception Thrown");
  }
  assertThat(value).isEqualTo(7);
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[29]).isEqualTo((short)1);
  // Assert that TestFunc08 excuted only BasicBlocks (1,2,3,4,5,6,7,8,9) skipping
  // 10,11,12 which is [1,0,0,0,1,1,1,1,1,1] in the Bit-Vector form due to
  // index being 2 and 2 % 2 = 0 which causes an Array Index Out of Bounds Exception
  // before it returns early
  assertThat(stats[30]).isEqualTo((short)0b1000111111);
}
// Traces testFunc09 through the 'case 16' switch arm ("purge").
@Test
@DoNotStrip
public void test09() {
  // Start Tracing Information and run Function before stopping
  InstrumentBasicBlockAnalysis.startTracing();
  assertThat(testFunc09(16)).isEqualTo("purge");
  InstrumentBasicBlockAnalysis.stopTracing();
  // Get Stats from Instrument Analysis
  short[] stats = InstrumentBasicBlockAnalysis.getStats();
  // Assert that only one function was executed at all
  // because we cleanup stats array before every tracing
  assertThat(stats[32]).isEqualTo((short)1);
  // Assert that TestFunc09 excuted only BasicBlocks (2,40) skipping
  // everything else because it didn't have to go into the initial if condition
  // because test-flag > 0 and the switch statement made it jump to exact basicblock
  // of 16 and returning immediately. As this test case has numerous basicblocks (55),
  // it needs four bitvectors so we need to make sure it is [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0] in
  // Bit-Vector 1 and [0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0] in Bit-Vector 2 while making sure the last
  // two Bit-Vectors are only zeroes.
  assertThat(stats[33]).isEqualTo((short)0b0000000000000010);
  assertThat(stats[34]).isEqualTo((short)0b0000000000000100);
  assertThat(stats[35]).isEqualTo((short)0b0000000000000000);
  assertThat(stats[36]).isEqualTo((short)0b0000000);
}
@Test
@DoNotStrip
public void test10() {
    // Trace one call to testFunc10 and verify the recorded basic-block hit vector.
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc10()).isFalse();
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[38]).isEqualTo((short)1);
    // Assert that TestFunc10 executed only BasicBlocks (0,1,4) skipping
    // 3 which is [1,0,1,1] in the Bit-Vector form due to exception
    assertThat(stats[39]).isEqualTo((short)0b1011);
}
@Test
@DoNotStrip
public void test11() {
    // Trace one call to testFunc11 and verify every basic block was hit.
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc11(15)).isEqualTo(510);
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[41]).isEqualTo((short)1);
    // Assert that TestFunc11 executed all BasicBlocks
    // which is [1,1,1,1,1] in the Bit-Vector form
    assertThat(stats[42]).isEqualTo((short)0b11111);
}
@Test
@DoNotStrip
public void test12() {
    // Trace one call to testFunc12 and verify its basic-block hit vector.
    int []array = {5,1,3,8,9,0,4};
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc12(array,4)).isEqualTo(6);
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[44]).isEqualTo((short)1);
    // Assert that TestFunc12 executed some BasicBlocks (1,2,3,4) skipping
    // 5 which is [0,1,1,1,1] in the Bit-Vector form due to early return
    assertThat(stats[45]).isEqualTo((short)0b01111);
}
@Test
@DoNotStrip
@SuppressWarnings("CatchGeneralException")
public void test13() {
    // Trace one (throwing) call to testFunc13 and verify its basic-block hit vector.
    int []array = {5,1,3,8,9,0,4};
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    try {
        testFunc13(array);
    } catch (Exception e) {
        System.out.println("Exeception Thrown");
    }
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[47]).isEqualTo((short)1);
    // Assert that TestFunc13 executed some BasicBlocks (2,3,4,5,6,7,9,11,12) skipping
    // 10 which is [0,1,1,1,1,1,1,1,1,1] in the Bit-Vector form because of early throw
    assertThat(stats[48]).isEqualTo((short)0b0111111111);
}
@Test
@DoNotStrip
public void test14() {
    // Trace one call to testFunc14 and verify its basic-block hit vector.
    int []array = {5,1,3,8,9,0,4};
    int []array2 = {21,56,11};
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc14(array, array2)).isEqualTo(0);
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[50]).isEqualTo((short)1);
    // Assert that TestFunc14 executed some BasicBlocks (2,3,4,5,9)
    // which is [0,1,0,0,1,1,1,1] in the Bit-Vector form
    assertThat(stats[51]).isEqualTo((short)0b01001111);
}
@Test
@DoNotStrip
public void test15() {
    // Trace one call to testFunc15 and verify every basic block was hit.
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc15(15)).isEqualTo(510);
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[53]).isEqualTo((short)1);
    // Assert that TestFunc15 executed all BasicBlocks (1)
    // which is [1,1,1,1,1] in the Bit-Vector form
    assertThat(stats[54]).isEqualTo((short)0b11111);
}
@Test
@DoNotStrip
public void test16() {
    // Trace one call to testFunc16 and verify its basic-block hit vector.
    int []array = {5,1,3,8,9,0,4};
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc16(array,4)).isEqualTo(6);
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[56]).isEqualTo((short)1);
    // Assert that TestFunc16 executed some BasicBlocks (1,2,3,4) skipping
    // 5 which is [0,1,1,1,1] in the Bit-Vector form due to early return
    assertThat(stats[57]).isEqualTo((short)0b01111);
}
@Test
@DoNotStrip
@SuppressWarnings("CatchGeneralException")
public void test17() {
    // Trace one (throwing) call to testFunc17 and verify its basic-block hit vector.
    int []array = {5,1,3,8,9,0,4};
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    try {
        testFunc17(array);
    } catch (Exception e) {
        System.out.println("Exeception Thrown");
    }
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[59]).isEqualTo((short)1);
    // Assert that TestFunc17 executed some BasicBlocks (2,3,4,5,6,7,9,11,12) skipping
    // 10 which is [0,1,1,1,1,1,1,1,1,1] in the Bit-Vector form because of early throw
    assertThat(stats[60]).isEqualTo((short)0b0111111111);
}
@Test
@DoNotStrip
public void test18() {
    // Trace one call to testFunc18 and verify its basic-block hit vector.
    int []array = {5,1,3,8,9,0,4};
    int []array2 = {21,56,11};
    // Start Tracing Information and run Function before stopping
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc18(array, array2)).isEqualTo(0);
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that only one function was executed at all
    // because we cleanup stats array before every tracing
    assertThat(stats[62]).isEqualTo((short)1);
    // Assert that TestFunc18 executed some BasicBlocks (2,3,4,5,9)
    // which is [0,1,0,0,1,1,1,1] in the Bit-Vector form
    assertThat(stats[63]).isEqualTo((short)0b01001111);
}
@Test
@DoNotStrip
public void test19() {
    // Trace testFunc15 and testFunc11 together and verify they produce
    // identical hit counts and hit vectors.
    // Start Tracing Information and run Function before stopping
    // TestFunc11 is equivalent to testFunc15 with only difference
    // being while loop being replaced with for loop
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc15(9)).isEqualTo(testFunc11(9));
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that testFunc11 is run once like testFunc15
    assertThat(stats[52]).isEqualTo((short)1);
    assertThat(stats[52]).isEqualTo(stats[40]);
    // Assert that TestFunc15 executed some BasicBlocks (1,2,4,5) skipping 3
    // which is [1,1,0,1,1] in the Bit-Vector form because of the if-condition
    // always being false
    // TestFunc15's bit-vector should be the same as testFunc11
    // because they are equivalent codes
    assertThat(stats[54]).isEqualTo((short)0b11011);
    assertThat(stats[54]).isEqualTo(stats[42]);
}
@Test
@DoNotStrip
public void test20() {
    // Trace testFunc18 and testFunc14 together and verify they produce
    // identical hit counts and hit vectors.
    int []array = {1,3};
    int []array2 = {21,56,11};
    // Start Tracing Information and run Function before stopping
    // TestFunc18 is equivalent to testFunc14 with only difference
    // being while loop being replaced with for loop
    InstrumentBasicBlockAnalysis.startTracing();
    assertThat(testFunc18(array, array2)).isEqualTo(testFunc14(array, array2));
    InstrumentBasicBlockAnalysis.stopTracing();
    // Get Stats from Instrument Analysis
    short[] stats = InstrumentBasicBlockAnalysis.getStats();
    // Assert that testFunc14 is run once like testFunc18
    assertThat(stats[61]).isEqualTo((short)1);
    assertThat(stats[61]).isEqualTo(stats[49]);
    // Assert that TestFunc18 executed some BasicBlocks (2,3,4,5,9)
    // which is [0,1,0,0,1,1,1,1] in the Bit-Vector form
    // TestFunc18's bit-vector should be the same as testFunc14
    // because they are equivalent codes
    // NOTE(review): the prose above matches 0b01001111 (as in test18), but the
    // assertion below uses 0b01111111 — with the shorter input array a different
    // path executes, so the comment looks stale; confirm the expected block set.
    assertThat(stats[63]).isEqualTo((short)0b01111111);
    assertThat(stats[63]).isEqualTo(stats[51]);
}
}
|
package org.slc.sli.api.security;
import java.io.IOException;
import java.util.Enumeration;
import javax.servlet.FilterChain;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.filter.GenericFilterBean;
/**
 * A security filter responsible for checking SLI session.
 *
 * Looks for a session id in the "sessionId" request header or, failing that,
 * in the "sessionId" request parameter. If one is found, it is resolved to an
 * Authentication and installed into the SecurityContext before the chain
 * continues.
 *
 * @author dkornishev
 *
 */
public class SLIProcessingFilter extends GenericFilterBean {
    // Fixed copy-paste bug: the logger was previously created for
    // SLIAuthenticationEntryPoint.class, misattributing this filter's log output.
    private static final Logger LOG = LoggerFactory.getLogger(SLIProcessingFilter.class);
    private static final String PARAM_SESSION = "sessionId";
    private static final String HEADER_SESSION_NAME = "sessionId";
    private SecurityTokenResolver resolver;
    /**
     * Intercepter method called by spring.
     * Checks headers (and the request parameter as a fallback) to see if an SLI
     * session id exists. If a session id is found, resolution will be attempted
     * and the result stored in the security context.
     *
     * @param request  incoming servlet request (expected to be HTTP)
     * @param response outgoing servlet response, passed through untouched
     * @param chain    remainder of the filter chain, always invoked
     */
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        HttpServletRequest req = (HttpServletRequest) request;
        String sessionId = null;
        @SuppressWarnings("unchecked")
        Enumeration<String> e = req.getHeaderNames();
        while (e.hasMoreElements()) {
            String header = e.nextElement();
            String headerValue = req.getHeader(header);
            // Parameterized logging avoids string concatenation when DEBUG is off.
            // NOTE(review): this logs every header value, including the session id;
            // confirm that is acceptable for this deployment's log policy.
            LOG.debug("[H]{}->{}", header, headerValue);
            if (HEADER_SESSION_NAME.equals(header)) {
                sessionId = headerValue;
            }
        }
        // A request parameter, if present, takes precedence over the header.
        if (req.getParameter(PARAM_SESSION) != null) {
            sessionId = req.getParameter(PARAM_SESSION);
        }
        if (sessionId != null) {
            SecurityContextHolder.getContext().setAuthentication(resolver.resolve(sessionId));
        }
        chain.doFilter(request, response);
    }
    /** Injects the resolver used to turn a session id into an Authentication. */
    public void setResolver(SecurityTokenResolver resolver) {
        this.resolver = resolver;
    }
}
|
package io.sniffy.util;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
/**
 * An {@link AtomicReferenceFieldUpdater} decorator that, when the target object
 * is an {@link ObjectWrapper}, applies each write to BOTH the wrapper instance
 * and its delegate, and answers reads/returns from the delegate.
 *
 * NOTE(review): for wrapped targets, the result of the first updater call (the
 * one against the wrapper itself) is intentionally discarded and the delegate's
 * result is returned — presumably to keep the two fields in sync while treating
 * the delegate as the source of truth; confirm against the callers. This also
 * means a compareAndSet can succeed on the wrapper but fail on the delegate
 * (only the delegate's outcome is reported).
 *
 * @since 3.1.7
 */
public class ObjectWrapperFieldUpdater<C, O> extends AtomicReferenceFieldUpdater<C, O> {
    // The real updater that performs every underlying field operation.
    private final AtomicReferenceFieldUpdater<C, O> defaultFieldUpdater;
    public ObjectWrapperFieldUpdater(AtomicReferenceFieldUpdater<C, O> defaultFieldUpdater) {
        this.defaultFieldUpdater = defaultFieldUpdater;
    }
    // True when the target carries a delegate that must be kept in sync.
    private boolean isWrappedObject(C wrapper) {
        return wrapper instanceof ObjectWrapper;
    }
    // Unwraps the delegate; only call after isWrappedObject() returned true.
    @SuppressWarnings("unchecked")
    private C getWrappedObject(C wrapper) {
        return ((ObjectWrapper<C>) wrapper).getDelegate();
    }
    /** Sets both wrapper and delegate; returns the delegate's previous value. */
    @Override
    public O getAndSet(C obj, O newValue) {
        if (isWrappedObject(obj)) {
            defaultFieldUpdater.getAndSet(obj, newValue);
            return defaultFieldUpdater.getAndSet(getWrappedObject(obj), newValue);
        } else {
            return defaultFieldUpdater.getAndSet(obj, newValue);
        }
    }
    /** CAS on both wrapper and delegate; reports only the delegate's outcome. */
    @Override
    public boolean compareAndSet(C obj, O expect, O update) {
        if (isWrappedObject(obj)) {
            defaultFieldUpdater.compareAndSet(obj, expect, update);
            return defaultFieldUpdater.compareAndSet(getWrappedObject(obj), expect, update);
        } else {
            return defaultFieldUpdater.compareAndSet(obj, expect, update);
        }
    }
    /** Weak CAS on both wrapper and delegate; reports only the delegate's outcome. */
    @Override
    public boolean weakCompareAndSet(C obj, O expect, O update) {
        if (isWrappedObject(obj)) {
            defaultFieldUpdater.weakCompareAndSet(obj, expect, update);
            return defaultFieldUpdater.weakCompareAndSet(getWrappedObject(obj), expect, update);
        } else {
            return defaultFieldUpdater.weakCompareAndSet(obj, expect, update);
        }
    }
    /** Sets the field on the wrapper and, when wrapped, also on the delegate. */
    @Override
    public void set(C obj, O newValue) {
        defaultFieldUpdater.set(obj, newValue);
        if (isWrappedObject(obj)) defaultFieldUpdater.set(getWrappedObject(obj), newValue);
    }
    /** Lazily sets the field on the wrapper and, when wrapped, also on the delegate. */
    @Override
    public void lazySet(C obj, O newValue) {
        defaultFieldUpdater.lazySet(obj, newValue);
        if (isWrappedObject(obj)) defaultFieldUpdater.lazySet(getWrappedObject(obj), newValue);
    }
    /**
     * Reads the field, returning the delegate's value for wrapped targets.
     * NOTE(review): the extra get(obj) on the wrapped path has no visible effect
     * beyond a volatile read of the wrapper's field — confirm it is intentional.
     */
    @Override
    public O get(C obj) {
        if (isWrappedObject(obj)) {
            defaultFieldUpdater.get(obj);
            return defaultFieldUpdater.get(getWrappedObject(obj));
        } else {
            return defaultFieldUpdater.get(obj);
        }
    }
}
|
package gov.nih.nci.gss.scheduler;
import gov.nih.nci.gss.domain.DataService;
import gov.nih.nci.gss.domain.DataServiceGroup;
import gov.nih.nci.gss.domain.DomainAttribute;
import gov.nih.nci.gss.domain.DomainClass;
import gov.nih.nci.gss.domain.DomainModel;
import gov.nih.nci.gss.domain.GridService;
import gov.nih.nci.gss.domain.HostingCenter;
import gov.nih.nci.gss.domain.PointOfContact;
import gov.nih.nci.gss.grid.DataServiceObjectCounter;
import gov.nih.nci.gss.grid.GSSCredentials;
import gov.nih.nci.gss.grid.GridAutoDiscoveryException;
import gov.nih.nci.gss.grid.GridIndexService;
import gov.nih.nci.gss.grid.GridServiceVerifier;
import gov.nih.nci.gss.support.LastRefresh;
import gov.nih.nci.gss.util.Cab2bAPI;
import gov.nih.nci.gss.util.Cab2bTranslator;
import gov.nih.nci.gss.util.GSSUtil;
import gov.nih.nci.gss.util.GridServiceDAO;
import gov.nih.nci.gss.util.NamingUtil;
import gov.nih.nci.gss.util.Cab2bAPI.Cab2bService;
import gov.nih.nci.system.applicationservice.ApplicationException;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.http.impl.cookie.DateUtils;
import org.apache.log4j.Logger;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.exception.ConstraintViolationException;
public class GridDiscoveryServiceJob {
private static Logger logger = Logger.getLogger(GridDiscoveryServiceJob.class);
private static final int NUM_QUERY_THREADS = 20;
private static final long DAY_IN_MILLIS = 1000 * 60 * 60 * 24;
private static final int MAX_COUNT_ERROR_LEN = 5000;
private static final int MAX_COUNT_STACKTRACE_LEN = 50000;
private static final String STATUS_CHANGE_ACTIVE = "ACTIVE";
private static final String STATUS_CHANGE_INACTIVE = "INACTIVE";
private Cab2bTranslator xlateUtil = null;
private NamingUtil namingUtil = null;
private Map<String,Cab2bService> cab2bServices = null;
/** Cache for JSON responses */
private Map cache;
private SessionFactory sessionFactory;
private Session hibernateSession;
/** Creates the job; collaborators (cache, session factory) are injected via setters. */
public GridDiscoveryServiceJob() {
    logger.info("Creating GridDiscoveryServiceJob");
}
/** Injects the JSON response cache, cleared after each successful refresh. */
public void setCache(Map cache) {
    this.cache = cache;
}
/** Injects the Hibernate session factory used for all GSS database access. */
public void setSessionFactory(SessionFactory sessionFactory) {
    logger.info("Setting session factory: "+sessionFactory);
    this.sessionFactory = sessionFactory;
}
/**
 * Runs one full discovery cycle: discovers services from the grid index,
 * merges them with the GSS database, verifies accessibility, updates object
 * counts, persists the result, and clears the JSON cache.
 *
 * A connection failure to the index service (SocketException or
 * SocketTimeoutException at the root of a GridAutoDiscoveryException) aborts
 * the run quietly; any other discovery failure is rethrown.
 *
 * @throws Exception on any non-connectivity failure
 */
public void execute() throws Exception {
    // Initialize helper classes
    this.xlateUtil = new Cab2bTranslator(sessionFactory);
    this.namingUtil = new NamingUtil(sessionFactory);
    Cab2bAPI api = new Cab2bAPI(xlateUtil);
    this.cab2bServices = api.getServices();
    Map<String,GridService> discovered = null;
    try {
        logger.info("Logged into Globus: "+GSSCredentials.getCredential());
        // Get services from Grid Index Service
        discovered = populateRemoteServices();
    }
    catch (GridAutoDiscoveryException e) {
        Throwable cause = GSSUtil.getRootException(e);
        boolean connectivity = (cause instanceof SocketException)
                || (cause instanceof SocketTimeoutException);
        if (!connectivity) {
            throw e;
        }
        logger.warn("Could not connect to index service.");
        return;
    }
    hibernateSession = sessionFactory.openSession();
    try {
        // Merge with our database to get a complete list of all services we know about
        Map<String,GridService> merged = mergeWithGss(discovered);
        if (merged != null) {
            // Verify accessibility
            verifyAccessibility(merged);
            // Update counts
            updateCounts(merged);
            // Update services as necessary or add new ones
            saveServices(merged, discovered.size());
            // Clear the JSON cache
            cache.clear();
        }
    }
    finally {
        hibernateSession.close();
        hibernateSession = null;
    }
}
/**
 * Discovers grid services from the index service.
 *
 * @return discovered services keyed by URL; duplicate URLs are logged and the
 *         later entry wins
 * @throws GridAutoDiscoveryException if discovery against the index fails
 */
private Map<String,GridService> populateRemoteServices()
        throws GridAutoDiscoveryException {
    // Keyed by service URL so duplicates collapse to a single entry.
    HashMap<String,GridService> byUrl = new HashMap<String,GridService>();
    logger.info("Discovering grid services");
    // auto-discover grid nodes and save in session
    List<GridService> discovered = GridIndexService.discoverGridServices();
    if (discovered == null) {
        return byUrl;
    }
    for (GridService svc : discovered) {
        if (byUrl.containsKey(svc.getUrl())) {
            logger.warn("Index Service returned duplicate service URL: "+
                    svc.getUrl());
        }
        byUrl.put(svc.getUrl(), svc);
    }
    return byUrl;
}
/**
* Merge the services reported by the Index Service with what is
* currently in the GSS database.
* @param gridNodes
* @return
*/
private HashMap<String,GridService> mergeWithGss(Map<String,GridService> gridNodes) {
HashMap<String,GridService> allServices = new HashMap<String,GridService>();
int countNew = 0;
int countUpdated = 0;
int countInactive = 0;
logger.info("Merging service metadata...");
Collection<GridService> currentServices = null;
Collection<HostingCenter> currentHosts = null;
HashMap<String,GridService> serviceMap = null;
HashMap<String,HostingCenter> hostMap = null;
try {
currentServices = GridServiceDAO.getServices(null,hibernateSession);
// Build a hash on URL for GridServices
serviceMap = new HashMap<String,GridService>();
for (GridService service : currentServices) {
serviceMap.put(service.getUrl(), service);
}
currentHosts = GridServiceDAO.getHosts(null,hibernateSession);
// Build a hash on hosting center long name for HostingCenters
hostMap = new HashMap<String,HostingCenter>();
for (HostingCenter host : currentHosts) {
hostMap.put(host.getLongName(), host);
}
}
catch (ApplicationException e) {
logger.error("Error getting service metadata from GSS database",e);
return null;
}
// Walk the list of gridNodes and update the current services and hosting centers where necessary
for (GridService service : gridNodes.values()) {
logger.info("
logger.info("Name: "+service.getName());
logger.info("URL: "+service.getUrl());
// Standardize the host long name
HostingCenter thisHC = service.getHostingCenter();
String hostLongName = null;
if (thisHC != null) {
hostLongName = namingUtil.getSimpleHostName(thisHC.getLongName());
// The trim is important because MySQL will consider two
// strings equal if the only difference is trailing whitespace
hostLongName = hostLongName.trim();
if (!thisHC.getLongName().equals(hostLongName)) {
logger.info("Host name: "+hostLongName+" (was "+thisHC.getLongName()+")");
thisHC.setLongName(hostLongName);
}
else {
logger.info("Host name: "+thisHC.getLongName());
}
// Create persistent identifier based on the long name
thisHC.setIdentifier(GSSUtil.generateHostIdentifier(thisHC));
// Hide this host?
thisHC.setHiddenDefault(namingUtil.isHidden(thisHC.getLongName()));
}
// Check to see if the hosting center already exists.
if (thisHC != null) {
if (hostMap.containsKey(hostLongName)) {
HostingCenter matchingHost = hostMap.get(hostLongName);
matchingHost = updateHostData(matchingHost, thisHC);
service.setHostingCenter(matchingHost);
logger.info("Using existing host with id: "+matchingHost.getId());
}
else {
hostMap.put(hostLongName, thisHC);
}
}
if (serviceMap.containsKey(service.getUrl())) {
logger.info("Service already exists, updating...");
countUpdated++;
// This service is already in the list of current services
GridService matchingSvc = serviceMap.get(service.getUrl());
// Update any new data about this service
matchingSvc = updateServiceData(matchingSvc, service);
// Check to see if this service is active once again
if (STATUS_CHANGE_INACTIVE.equals(matchingSvc.getLastStatus())) {
// Service was marked as inactive, need to make it active now
service.setLastStatus(createStatus(true));
}
allServices.put(matchingSvc.getUrl(),matchingSvc);
}
else {
logger.info("Creating new service...");
countNew++;
// Mark this service as published/discovered now. Also, give it a default status change of "up".
// TODO: Is there a better "publish date" in the service metadata?
service.setPublishDate(new Date());
// Set up service simple name and linkage to correct caB2B model group
service.setSimpleName(namingUtil.getSimpleServiceName(service.getName()));
// Hide some core infrastructure services
service.setHiddenDefault(namingUtil.isHidden(service.getName()));
// Create a persistent identifier based on the URL
service.setIdentifier(GSSUtil.generateServiceIdentifier(service));
if (service instanceof DataService) {
DataService dataService = (DataService)service;
dataService = updateCab2bData(dataService);
}
service.setLastStatus(createStatus(true));
allServices.put(service.getUrl(),service);
}
service.setLastUpdate(new Date());
}
// Mark the services we didn't see as inactive
for (GridService service : currentServices) {
if (!gridNodes.containsKey(service.getUrl())) {
countInactive++;
logger.info("
logger.info("Name: "+service.getName());
logger.info("URL: "+service.getUrl());
logger.info("Not found in index service metadata.");
service.setLastStatus(createStatus(false));
allServices.put(service.getUrl(),service);
}
}
logger.info("Database will be updated as follows:");
logger.info("New services found: "+countNew);
logger.info("Existing services updated: "+countUpdated);
logger.info("Existing services marked inactive: "+countInactive);
return allServices;
}
/**
 * Queries object counts for every accessible data service in parallel.
 *
 * Stale per-class counts are cleared up front so nothing misleading survives
 * if a counter gives up early. Counting is bounded to one hour; tasks still
 * running at the deadline are told to disregard their results.
 *
 * @param gridNodes all known services keyed by URL; only DataServices with a
 *                  domain model are counted
 */
private void updateCounts(Map<String,GridService> gridNodes) {
    ExecutorService pool = Executors.newFixedThreadPool(NUM_QUERY_THREADS);
    List<DataServiceObjectCounter> submitted = new ArrayList<DataServiceObjectCounter>();
    logger.info("Updating counts...");
    for (GridService candidate : gridNodes.values()) {
        if (!(candidate instanceof DataService)) {
            continue;
        }
        DataService dataService = (DataService) candidate;
        DomainModel model = dataService.getDomainModel();
        if (model == null) {
            continue;
        }
        // clear everything so that there's no stale data if we give up early
        for (DomainClass domainClass : model.getClasses()) {
            domainClass.setCount(null);
            domainClass.setCountDate(null);
            domainClass.setCountError(null);
            domainClass.setCountStacktrace(null);
        }
        // Avoid services which didn't respond to a WSDL query
        if (!candidate.getAccessible()) {
            logger.info("Not attempting to count for inaccessible service: "+
                    candidate.getUrl());
            continue;
        }
        DataServiceObjectCounter counter = new DataServiceObjectCounter(dataService);
        submitted.add(counter);
        pool.submit(counter);
    }
    try {
        pool.shutdown();
        logger.info("Awaiting completion of object counting...");
        boolean finished = pool.awaitTermination(60*60, TimeUnit.SECONDS);
        if (!finished) {
            logger.info("Timed out waiting for counts to finish, disregarding remaining counts.");
            // timed out, cancel the tasks
            for (DataServiceObjectCounter counter : submitted) {
                synchronized (counter) {
                    counter.disregard();
                }
            }
        }
        logger.info("Object counting completed.");
    }
    catch (InterruptedException e) {
        logger.error("Could not update object counts",e);
    }
}
/**
 * Verifies accessibility of every known service in parallel.
 *
 * Verification is bounded to one hour; tasks still running at the deadline
 * are told to disregard their results.
 *
 * @param gridNodes all known services keyed by URL
 */
private void verifyAccessibility(Map<String,GridService> gridNodes) {
    ExecutorService parallelExecutor = Executors.newFixedThreadPool(NUM_QUERY_THREADS);
    List<GridServiceVerifier> verifiers = new ArrayList<GridServiceVerifier>();
    logger.info("Verifying accessibility...");
    for (GridService service : gridNodes.values()) {
        GridServiceVerifier verifier =
            new GridServiceVerifier(service);
        verifiers.add(verifier);
        parallelExecutor.submit(verifier);
    }
    try {
        parallelExecutor.shutdown();
        logger.info("Awaiting completion of service verification...");
        if (!parallelExecutor.awaitTermination(60*60, TimeUnit.SECONDS)) {
            // Fixed copy-pasted message from updateCounts(): this path times out
            // on verification, not on counting.
            logger.info("Timed out waiting for verification to finish, disregarding remaining verifications.");
            // timed out, cancel the tasks
            for(GridServiceVerifier verifier : verifiers) {
                synchronized (verifier) {
                    verifier.disregard();
                }
            }
        }
        logger.info("Service verification completed.");
    }
    catch (InterruptedException e) {
        logger.error("Could not verify services",e);
    }
}
/**
 * Actually save all the changes made to the GSS object model.
 *
 * Services that have been inaccessible for more than a day are deleted;
 * everything else is saved. The whole update runs in one transaction that is
 * rolled back on any failure, and the LastRefresh record is stamped on success.
 *
 * @param services     all known services keyed by URL
 * @param numGridNodes number of services reported by the index, recorded in
 *                     the LastRefresh row
 */
private void saveServices(Map<String,GridService> services, int numGridNodes) {
    logger.info("Updating GSS database...");
    Transaction tx = null;
    Date nowDate = new Date();
    long now = nowDate.getTime();
    try {
        tx = hibernateSession.beginTransaction();
        for(GridService service : services.values()) {
            long diff = now - service.getLastUpdate().getTime();
            if ((diff > DAY_IN_MILLIS && !service.getAccessible())) {
                logger.info("Service defunct, deleting: "+service.getUrl());
                deleteService(service);
            }
            else {
                // Use the shared constant instead of a duplicated "INACTIVE" literal.
                if (STATUS_CHANGE_INACTIVE.equals(service.getLastStatus())) {
                    logger.info("Service defunct, but not yet ready for deletion: "+service.getUrl());
                }
                saveService(service);
            }
        }
        // Note that the update completed
        LastRefresh lastRefresh = GridServiceDAO.getLastRefreshObject(hibernateSession);
        lastRefresh.setCompletionDate(nowDate);
        // Long.valueOf over the deprecated boxing constructor.
        lastRefresh.setNumServices(Long.valueOf(numGridNodes));
        hibernateSession.save(lastRefresh);
        logger.info("Commiting changes to GSS database...");
        tx.commit();
        logger.info("Commit complete.");
    }
    catch (Exception e) {
        if (tx != null) {
            tx.rollback();
        }
        logger.error("Error updating GSS database",e);
    }
}
/**
 * Persists one service and its object graph in reverse referencing order:
 * POCs, hosting center, domain model, domain classes, domain attributes, and
 * finally the service itself. Constraint violations and other runtime
 * failures are logged and swallowed so one bad service does not abort the run.
 *
 * @param service the service to save (its graph is mutated with generated ids)
 */
private void saveService(GridService service) {
    try {
        // Domain classes are saved in reverse referencing order
        // 1) All POCs
        for (PointOfContact POC : service.getPointOfContacts()) {
            logger.debug("Saving Service POC "+POC.getName());
            POC.setId((Long)hibernateSession.save(POC));
        }
        HostingCenter hc = service.getHostingCenter();
        if (hc != null) {
            for (PointOfContact POC : hc.getPointOfContacts()) {
                logger.debug("Saving Host POC "+POC.getName());
                POC.setId((Long)hibernateSession.save(POC));
            }
            // 2) Hosting Center
            if (hc.getId() == null) {
                logger.debug("Saving Host: "+hc.getLongName());
                // Hosting center has not been saved yet
                hc.setId((Long)hibernateSession.save(hc));
            }
        }
        // 3) Domain Model
        if (service instanceof DataService) {
            DomainModel model = ((DataService)service).getDomainModel();
            if (model != null) {
                logger.debug("Saving Domain Model: "+model.getLongName());
                model.setId((Long)hibernateSession.save(model));
                // 4) Domain Classes
                logger.debug("Saving "+model.getClasses().size()+" Domain Classes");
                for(DomainClass domainClass : model.getClasses()) {
                    // truncate values that are too long to fit in the DB
                    if (domainClass.getCountError() != null
                            && domainClass.getCountError().length() > MAX_COUNT_ERROR_LEN) {
                        logger.warn("Truncating long count error for: "+service.getUrl());
                        domainClass.setCountError(
                            truncateWithEllipsis(domainClass.getCountError(), MAX_COUNT_ERROR_LEN));
                    }
                    if (domainClass.getCountStacktrace() != null
                            && domainClass.getCountStacktrace().length() > MAX_COUNT_STACKTRACE_LEN) {
                        logger.warn("Truncating long count stacktrace for: "+service.getUrl());
                        domainClass.setCountStacktrace(
                            truncateWithEllipsis(domainClass.getCountStacktrace(), MAX_COUNT_STACKTRACE_LEN));
                    }
                    domainClass.setId((Long)hibernateSession.save(domainClass));
                    // 5) Domain Attributes
                    if (domainClass.getAttributes() != null) {
                        logger.debug("Saving "+domainClass.getAttributes().size()+" Domain Attributes");
                        for(DomainAttribute domainAttr : domainClass.getAttributes()) {
                            domainAttr.setId((Long)hibernateSession.save(domainAttr));
                        }
                    }
                    else {
                        logger.debug("Null Domain Attributes List");
                    }
                }
            }
        }
        // 5) Grid Service
        logger.debug("Saving Service: "+service.getName());
        service.setId((Long)hibernateSession.save(service));
    }
    catch (ConstraintViolationException e) {
        logger.warn("Duplicate object for: " + service.getUrl(),e);
    }
    catch (RuntimeException e) {
        logger.warn("Unable to save GridService",e);
    }
}

/**
 * Truncates {@code value} to {@code maxLen} characters, replacing the last
 * three kept characters with "..." (matches the original inline logic:
 * substring(0, maxLen-3) + "...").
 */
private static String truncateWithEllipsis(String value, int maxLen) {
    return value.substring(0, maxLen - 3) + "...";
}
/**
 * Deletes one defunct service and its owned object graph (domain attributes,
 * domain classes, domain model, and service POCs). The hosting center is
 * deliberately left in place since other services may reference it.
 *
 * NOTE(review): the comment says "referencing order", yet the service row is
 * deleted before its model/POCs; presumably the FK direction makes that safe —
 * confirm against the schema.
 *
 * Failures are logged and swallowed so one bad delete does not abort the run.
 */
private void deleteService(GridService service) {
    try {
        // Domain classes are deleted in referencing order
        // 1) Grid Service
        logger.info("Deleting Service: "+service.getName());
        hibernateSession.delete(service);
        if (service instanceof DataService) {
            DomainModel model = ((DataService)service).getDomainModel();
            if (model != null) {
                for(DomainClass domainClass : model.getClasses()) {
                    // 2) Domain Attributes
                    if (domainClass.getAttributes() != null) {
                        logger.info("Deleting "+domainClass.getAttributes().size()+" Domain Attributes");
                        for(DomainAttribute domainAttr : domainClass.getAttributes()) {
                            hibernateSession.delete(domainAttr);
                        }
                    }
                    // 3) Domain Classes
                    hibernateSession.delete(domainClass);
                }
                logger.info("Deleting "+model.getClasses().size()+" Domain Classes");
                // 4) Domain Model
                logger.info("Deleting Domain Model: "+model.getLongName());
                hibernateSession.delete(model);
            }
        }
        // 1) All POCs
        for (PointOfContact POC : service.getPointOfContacts()) {
            logger.info("Deleting Service POC "+POC.getName());
            hibernateSession.delete(POC);
        }
        // Don't delete the hosting center in case other services are there,
        // or if services are added at a later date
    }
    catch (ConstraintViolationException e) {
        logger.warn("Violated constraint deleting: " + service.getUrl(),e);
    }
    catch (RuntimeException e) {
        logger.warn("Unable to delete GridService",e);
    }
}
/**
 * Maps an activity flag to its status-change string.
 *
 * @param isActive true for an active service (unboxed; a null argument would NPE)
 * @return STATUS_CHANGE_ACTIVE or STATUS_CHANGE_INACTIVE
 */
private String createStatus(Boolean isActive) {
    if (isActive) {
        return STATUS_CHANGE_ACTIVE;
    }
    return STATUS_CHANGE_INACTIVE;
}
/**
 * Copies refreshed metadata from {@code host} onto the persisted
 * {@code matchingHost}. The long name (unique key) and id (db primary key)
 * are deliberately left untouched.
 *
 * @param matchingHost the persisted hosting center to update in place
 * @param host         freshly discovered hosting center data
 * @return the updated {@code matchingHost}
 */
private HostingCenter updateHostData(HostingCenter matchingHost,
        HostingCenter host) {
    // Address fields first, then display/visibility fields; the individual
    // setters are independent of one another.
    matchingHost.setStreet(host.getStreet());
    matchingHost.setLocality(host.getLocality());
    matchingHost.setStateProvince(host.getStateProvince());
    matchingHost.setPostalCode(host.getPostalCode());
    matchingHost.setCountryCode(host.getCountryCode());
    matchingHost.setShortName(host.getShortName());
    matchingHost.setHiddenDefault(host.getHiddenDefault());
    return matchingHost;
}
/**
 * Enriches a data service with caB2B metadata when the service's URL is known
 * to caB2B: assigns its service group (possibly null when the model group
 * cannot be translated) and its search-default flag. Services unknown to
 * caB2B get searchDefault=false.
 *
 * @param dataService the service to enrich (mutated in place)
 * @return the same {@code dataService} instance
 */
private DataService updateCab2bData(DataService dataService) {
    Cab2bService cab2bService = cab2bServices.get(dataService.getUrl());
    if (cab2bService == null) {
        // Not registered in caB2B: not searchable by default.
        dataService.setSearchDefault(false);
        return dataService;
    }
    // Translate the caB2B model group to a service group
    String modelGroupName = cab2bService.getModelGroupName();
    DataServiceGroup group = xlateUtil.getServiceGroupForModelGroup(modelGroupName);
    // Populate service attributes
    dataService.setGroup(group);
    dataService.setSearchDefault(cab2bService.isSearchDefault());
    if (group == null) {
        logger.info("Found service in caB2B but could not " +
            "translate group "+cab2bService.getModelGroupName());
    }
    else {
        logger.info("Found service in caB2B under group "+
            group.getName()+" with searchDefault="+
            cab2bService.isSearchDefault());
    }
    return dataService;
}
/**
 * Copies refreshed metadata from a newly-discovered service onto the
 * already-persisted one, including (for data services) a merge of the
 * domain model's classes and attributes.
 *
 * @param matchingSvc the persisted service to update (same URL as service)
 * @param service the freshly discovered service carrying new metadata
 * @return matchingSvc, updated in place
 */
private GridService updateServiceData(GridService matchingSvc,
        GridService service) {
    // Copy over data from the new service
    // - Do not overwrite: url (unique keys), id (db primary key), publish date (should stay the original value)
    matchingSvc.setName(service.getName());
    matchingSvc.setSimpleName(namingUtil.getSimpleServiceName(service.getName()));
    // Hide this service?
    matchingSvc.setHiddenDefault(namingUtil.isHidden(service.getName()));
    matchingSvc.setVersion(service.getVersion());
    matchingSvc.setDescription(service.getDescription());
    matchingSvc.setHostingCenter(service.getHostingCenter());
    if (matchingSvc instanceof DataService && service instanceof DataService) {
        DataService dataService = (DataService)service;
        DataService matchingDataSvc = (DataService)matchingSvc;
        // We are consciously overwriting things here that likely will not change,
        // since they are based on the URL, which is guaranteed to be the same if we
        // call this function. However, on the off chance that the DB lookup tables or
        // caB2B content has changed, we need to overwrite here to be sure.
        updateCab2bData(matchingDataSvc);
        // Update domain model
        DomainModel model = dataService.getDomainModel();
        DomainModel matchingModel = matchingDataSvc.getDomainModel();
        if (matchingModel == null) {
            logger.warn("Existing data service has no model: "+service.getUrl());
            matchingDataSvc.setDomainModel(model);
            return matchingSvc;
        }
        if (model == null) {
            logger.warn("Data service has no model: "+service.getUrl());
            return matchingSvc;
        }
        matchingModel.setDescription(model.getDescription());
        matchingModel.setLongName(model.getLongName());
        matchingModel.setVersion(model.getVersion());
        // Add existing classes to a map keyed by fully-qualified class name
        Map<String,DomainClass> existingClasses = new HashMap<String,DomainClass>();
        for(DomainClass domainClass : matchingModel.getClasses()) {
            String fullClass = domainClass.getDomainPackage()+"."+domainClass.getClassName();
            existingClasses.put(fullClass,domainClass);
            logger.debug("  Existing class: "+fullClass);
        }
        // Go through new classes
        for(DomainClass domainClass : model.getClasses()) {
            String fullClass = domainClass.getDomainPackage()+"."+domainClass.getClassName();
            DomainClass matchingClass = existingClasses.get(fullClass);
            if (matchingClass != null) {
                // Update existing class with new metadata
                matchingClass.setDescription(domainClass.getDescription());
                // Add existing attributes to a map keyed by attribute name
                Map<String,DomainAttribute> existingAttrs = new HashMap<String,DomainAttribute>();
                for(DomainAttribute domainAttr : matchingClass.getAttributes()) {
                    existingAttrs.put(domainAttr.getAttributeName(),domainAttr);
                    logger.debug("  Existing attr: "+domainAttr.getAttributeName());
                }
                // Go through new attributes.
                // BUG FIX: iterate the NEW class's attributes (previously this
                // looped over matchingClass, so new attributes were never seen),
                // and look up the map by attribute name (previously
                // containsKey() was passed the DomainAttribute itself, which
                // never matched a String key).
                for(DomainAttribute domainAttr : domainClass.getAttributes()) {
                    DomainAttribute matchingAttr = existingAttrs.get(domainAttr.getAttributeName());
                    if (matchingAttr != null) {
                        // Update existing attr with new metadata
                        matchingAttr.setCdePublicId(domainAttr.getCdePublicId());
                        matchingAttr.setDataTypeName(domainAttr.getDataTypeName());
                    }
                    else {
                        // Add new attribute
                        logger.debug("  New attr: "+domainAttr.getAttributeName());
                        matchingClass.getAttributes().add(domainAttr);
                    }
                }
            }
            else {
                // Add new class
                logger.debug("  New class: "+fullClass);
                matchingModel.getClasses().add(domainClass);
            }
        }
        // TODO: handle domain class and attribute deletions
    }
    return matchingSvc;
}
}
|
package gov.nih.nci.gss.scheduler;
import gov.nih.nci.gss.domain.DataService;
import gov.nih.nci.gss.domain.DataServiceGroup;
import gov.nih.nci.gss.domain.GridService;
import gov.nih.nci.gss.domain.HostingCenter;
import gov.nih.nci.gss.domain.PointOfContact;
import gov.nih.nci.gss.domain.StatusChange;
import gov.nih.nci.gss.grid.GridAutoDiscoveryException;
import gov.nih.nci.gss.grid.GridIndexService;
import gov.nih.nci.gss.util.Cab2bAPI;
import gov.nih.nci.gss.util.Cab2bTranslator;
import gov.nih.nci.gss.util.GridServiceDAO;
import gov.nih.nci.gss.util.HibernateUtil;
import gov.nih.nci.gss.util.NamingUtil;
import gov.nih.nci.gss.util.Cab2bAPI.Cab2bService;
import gov.nih.nci.system.applicationservice.ApplicationException;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServlet;
import org.apache.log4j.Logger;
import org.hibernate.Session;
import org.hibernate.Transaction;
import org.hibernate.exception.ConstraintViolationException;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
/**
 * Quartz job that refreshes the GSS database from the grid index service:
 * discovers all advertised grid services, merges them with the services
 * already persisted (updating metadata and hosting centers), and records
 * ACTIVE/INACTIVE status transitions for services that appear or vanish.
 */
public class GridDiscoveryServiceJob extends HttpServlet implements Job {

    private static Logger logger = Logger
            .getLogger(GridDiscoveryServiceJob.class.getName());

    /** Status recorded when a service is reachable/discovered. */
    private static final String STATUS_CHANGE_ACTIVE = "ACTIVE";
    /** Status recorded when a service has dropped out of the index. */
    private static final String STATUS_CHANGE_INACTIVE = "INACTIVE";

    private Cab2bTranslator xlateUtil = null;
    private NamingUtil namingUtil = null;
    // caB2B registrations keyed by service URL; filled once per execution.
    private Map<String,Cab2bService> cab2bServices = null;

    /*
     * (non-Javadoc)
     *
     * @see org.quartz.Job#execute(org.quartz.JobExecutionContext)
     */
    public void execute(JobExecutionContext context)
            throws JobExecutionException {
        try {
            this.xlateUtil = new Cab2bTranslator(HibernateUtil.getSessionFactory());
            this.namingUtil = new NamingUtil(HibernateUtil.getSessionFactory());
            Cab2bAPI cab2bAPI = new Cab2bAPI(xlateUtil);
            this.cab2bServices = cab2bAPI.getServices();
        }
        catch (Exception e) {
            // Without the caB2B catalog we cannot classify data services;
            // abort this run (refire flag = false keeps the schedule intact).
            throw new JobExecutionException(
                "Could not retrieve caB2B services",e,false);
        }
        // Update services as necessary or add new ones
        Map<String,GridService> gridNodes = populateServicesFromIndex();
        updateGssServices(gridNodes);
    }

    /** Discovers all grid services from the index, keyed by URL. */
    public Map<String,GridService> populateServicesFromIndex() {
        return populateRemoteServices();
    }

    /**
     * Auto-discovers grid services from the index server.
     *
     * @return services keyed by URL; empty map if discovery failed
     */
    private Map<String,GridService> populateRemoteServices() {
        // Build a hash on URL for GridServices
        HashMap<String,GridService> serviceMap = new HashMap<String,GridService>();
        logger.debug("Refreshing Grid Nodes via discoverServices");
        // auto-discover grid nodes and save in session
        List<GridService> list = null;
        try {
            list = GridIndexService.discoverGridServices();
        } catch (GridAutoDiscoveryException e) {
            String err = "Error in discovering grid services from the index server";
            logger.warn(err);
            list = null;
        }
        if (list != null) {
            for (GridService service : list) {
                serviceMap.put(service.getUrl(), service);
            }
        }
        return serviceMap;
    }

    /**
     * Persists a service and its dependent objects in FK-safe order:
     * POCs, hosting center, domain model, the service itself, then the
     * optional status change.
     *
     * @param service the service to save (id is set on success)
     * @param sc optional status change to persist with the service; may be null
     * @param hibernateSession open session to save through
     */
    private void saveService(GridService service, StatusChange sc, Session hibernateSession) {
        logger.info("  - Saving GridService: " + service.getName());
        try {
            // Save in the following order:
            // 1) All POCs
            for (PointOfContact POC : service.getPointOfContacts()) {
                logger.info("  - Saving Service POC "+POC.getName());
                POC.setId((Long)hibernateSession.save(POC));
            }
            HostingCenter hc = service.getHostingCenter();
            if (hc != null) {
                for (PointOfContact POC : hc.getPointOfContacts()) {
                    logger.info("  - Saving Host POC "+POC.getName());
                    POC.setId((Long)hibernateSession.save(POC));
                }
                // 2) Hosting Center
                if (hc.getId() == null) {
                    logger.info("  - Saving Host "+hc.getLongName());
                    // Hosting center has not been saved yet
                    hc.setId((Long)hibernateSession.save(hc));
                }
            }
            // - 3) Domain Model (TBD)
            // NOTE(review): getClass() == DataService.class deliberately(?)
            // excludes subclasses; left as-is to avoid a behavior change.
            if (service.getClass() == DataService.class) {
                logger.info("  - Saving Domain Model ");
                hibernateSession.save(((DataService)service).getDomainModel());
            }
            // - 4) Grid Services
            logger.info("  - Saving Service ");
            service.setId((Long)hibernateSession.save(service));
            // - 5) Status Changes
            if (sc != null) {
                logger.info("  - Saving Status Change ");
                hibernateSession.save(sc);
            }
            // - 6) Domain Classes (TBD)
        } catch (ConstraintViolationException e) {
            logger.warn("Duplicate grid service found: " + service.getUrl());
        } catch (RuntimeException e) {
            logger.warn("Unable to save GridService: " + e.getMessage());
        }
    }

    /** Builds a status-change record dated now for the given service. */
    private static StatusChange populateStatusChange(GridService service, Boolean isActive) {
        StatusChange newSC = new StatusChange();
        newSC.setChangeDate(new Date());
        newSC.setGridService(service);
        newSC.setNewStatus(isActive ? STATUS_CHANGE_ACTIVE : STATUS_CHANGE_INACTIVE);
        return newSC;
    }

    /**
     * Merges the freshly-discovered services into the persisted set inside a
     * single transaction: updates known services, adds new ones, and marks
     * services missing from the index as INACTIVE.
     *
     * @param gridNodes discovered services keyed by URL
     */
    private void updateGssServices(Map<String,GridService> gridNodes) {
        Transaction tx = null;
        Session hibernateSession = HibernateUtil.getSessionFactory().openSession();
        try {
            tx = hibernateSession.beginTransaction();
            Collection<GridService> currentServices = GridServiceDAO.getServices(null,false,hibernateSession);
            // Build a hash on URL for GridServices
            HashMap<String,GridService> serviceMap = new HashMap<String,GridService>();
            for (GridService service : currentServices) {
                serviceMap.put(service.getUrl(), service);
            }
            Collection<HostingCenter> currentHosts = GridServiceDAO.getHosts(null,hibernateSession);
            // Build a hash on hosting center long name for HostingCenters
            HashMap<String,HostingCenter> hostMap = new HashMap<String,HostingCenter>();
            for (HostingCenter host : currentHosts) {
                hostMap.put(host.getLongName(), host);
            }
            // Walk the list of gridNodes and update the current services and hosting centers where necessary
            for (GridService service : gridNodes.values()) {
                logger.info("SAVE: "+service.getUrl());
                // Standardize the host long name
                HostingCenter thisHC = service.getHostingCenter();
                String hostLongName = null;
                if (thisHC != null) {
                    hostLongName = namingUtil.getSimpleHostName(thisHC.getLongName());
                    // The trim is important because MySQL will consider two
                    // strings equal if the only difference is trailing whitespace
                    hostLongName = hostLongName.trim();
                    if (!thisHC.getLongName().equals(hostLongName)) {
                        logger.info("  Changing host name: "+thisHC.getLongName()+" -> "+hostLongName);
                    }
                    thisHC.setLongName(hostLongName);
                }
                if (serviceMap.containsKey(service.getUrl())) {
                    logger.info("  Service already exists");
                    // This service is already in the list of current services
                    GridService matchingSvc = serviceMap.get(service.getUrl());
                    // Update any new data about this service
                    matchingSvc = updateServiceData(matchingSvc, service);
                    // Make sure the hosting center exists and is up to date
                    if (thisHC != null) {
                        if (hostMap.containsKey(hostLongName)) {
                            HostingCenter matchingHost = hostMap.get(hostLongName);
                            matchingHost = updateHostData(matchingHost, thisHC);
                            matchingSvc.setHostingCenter(matchingHost);
                        }
                        else {
                            hostMap.put(hostLongName, thisHC);
                        }
                    }
                    // Check to see if this service is active once again.
                    // BUG FIX: guard against an empty status history, which
                    // previously threw NoSuchElementException from next().
                    Collection<StatusChange> changes = matchingSvc.getStatusHistory();
                    StatusChange mostRecentChange = (changes == null || changes.isEmpty())
                            ? null : changes.iterator().next();
                    if (mostRecentChange != null
                            && STATUS_CHANGE_INACTIVE.equals(mostRecentChange.getNewStatus())) {
                        // Service was marked as inactive, need to make it active now
                        StatusChange newSC = populateStatusChange(matchingSvc, true);
                        matchingSvc.getStatusHistory().add(newSC);
                        saveService(matchingSvc,newSC,hibernateSession);
                    } else {
                        saveService(matchingSvc,null,hibernateSession);
                    }
                } else {
                    logger.info("  New service");
                    // This is a new service.
                    // Check to see if the hosting center already exists.
                    if (thisHC != null) {
                        if (hostMap.containsKey(hostLongName)) {
                            HostingCenter matchingHost = hostMap.get(hostLongName);
                            matchingHost = updateHostData(matchingHost, thisHC);
                            service.setHostingCenter(matchingHost);
                            logger.info("  Using Host: "+matchingHost.getId()+" "+matchingHost.getLongName());
                        }
                        else {
                            hostMap.put(hostLongName, thisHC);
                            logger.info("  New Host: "+thisHC.getId()+" "+thisHC.getLongName());
                        }
                    }
                    // Mark this service as published/discovered now. Also, give it a default status change of "up".
                    // TODO: Is there a better "publish date" in the service metadata?
                    service.setPublishDate(new Date());
                    StatusChange sc = populateStatusChange(service, true);
                    Collection<StatusChange> scList = new HashSet<StatusChange>();
                    scList.add(sc);
                    service.setStatusHistory(scList);
                    // Set up service simple name and linkage to correct caB2B model group
                    service.setSimpleName(namingUtil.getSimpleServiceName(service.getName()));
                    if (service instanceof DataService) {
                        DataService dataService = (DataService)service;
                        // Do not select for search by default
                        dataService.setSearchDefault(false);
                        Cab2bService cab2bService = cab2bServices.get(service.getUrl());
                        if (cab2bService != null) {
                            logger.info("  Found caB2BService: "+cab2bService.getUrl()+", group "+cab2bService.getModelGroupName());
                            // Translate the caB2B model group to a service group
                            DataServiceGroup group = xlateUtil.getServiceGroupObj(
                                cab2bService.getModelGroupName());
                            // Populate service attributes
                            dataService.setGroup(group);
                            dataService.setSearchDefault(cab2bService.isSearchDefault());
                        }
                    }
                    saveService(service,sc,hibernateSession);
                }
            }
            // Walk the list of currentServices and mark those not in gridNodes inactive
            for (GridService service : currentServices) {
                if (!gridNodes.containsKey(service.getUrl())) {
                    // BUG FIX: same empty-history guard as above.
                    Collection<StatusChange> changes = service.getStatusHistory();
                    StatusChange mostRecentChange = (changes == null || changes.isEmpty())
                            ? null : changes.iterator().next();
                    if (mostRecentChange != null
                            && STATUS_CHANGE_ACTIVE.equals(mostRecentChange.getNewStatus())) {
                        // Service was marked as active, need to make it inactive now
                        StatusChange newSC = populateStatusChange(service, false);
                        service.getStatusHistory().add(newSC);
                        saveService(service,newSC,hibernateSession);
                    }
                }
            }
            tx.commit();
        } catch (ApplicationException e) {
            if (tx != null) {
                tx.rollback();
            }
            // Log through log4j instead of printStackTrace so the failure
            // lands in the application log.
            logger.error("Error updating GSS services", e);
        }
        finally {
            hibernateSession.close();
        }
    }

    /**
     * Copies refreshed hosting-center attributes onto the persisted host.
     * Does not overwrite: long name (unique key), id (db primary key).
     */
    private HostingCenter updateHostData(HostingCenter matchingHost,
            HostingCenter newHC) {
        matchingHost.setCountryCode(newHC.getCountryCode());
        matchingHost.setLocality(newHC.getLocality());
        matchingHost.setPostalCode(newHC.getPostalCode());
        matchingHost.setShortName(newHC.getShortName());
        matchingHost.setStateProvince(newHC.getStateProvince());
        matchingHost.setStreet(newHC.getStreet());
        return matchingHost;
    }

    /**
     * Copies refreshed service metadata onto the persisted service.
     * Does not overwrite: url (unique key), id (db primary key),
     * publish date (should stay the original value).
     */
    private GridService updateServiceData(GridService matchingSvc,
            GridService service) {
        matchingSvc.setName(service.getName());
        matchingSvc.setSimpleName(namingUtil.getSimpleServiceName(service.getName()));
        matchingSvc.setVersion(service.getVersion());
        matchingSvc.setDescription(service.getDescription());
        // We are consciously overwriting things here that likely will not change,
        // since they are based on the URL, which is guaranteed to be the same if we
        // call this function. However, on the off chance that the DB lookup tables or
        // caB2B content has changed, we need to overwrite here to be sure.
        if (matchingSvc instanceof DataService && service instanceof DataService) {
            // Cast once; the previous version also declared an unused
            // DataService local for the discovered service.
            DataService matchingDataSvc = (DataService)matchingSvc;
            // Do not select for search by default
            matchingDataSvc.setSearchDefault(false);
            Cab2bService cab2bService = cab2bServices.get(service.getUrl());
            if (cab2bService != null) {
                logger.info("  Found caB2BService: "+cab2bService.getUrl()+", group "+cab2bService.getModelGroupName());
                // Translate the caB2B model group to a service group
                DataServiceGroup group = xlateUtil.getServiceGroupObj(
                    cab2bService.getModelGroupName());
                // Populate service attributes
                matchingDataSvc.setGroup(group);
                matchingDataSvc.setSearchDefault(cab2bService.isSearchDefault());
            }
            // TODO: Temporary attempt to fix unloaded proxy when saving grid services
            matchingDataSvc.setDomainModel(null);
        }
        return matchingSvc;
    }
}
|
package edu.wustl.cab2b.common.locator;
import edu.wustl.cab2b.common.exception.RuntimeException;
/**
* All exception occurred in {@link edu.wustl.cab2b.common.locator.Locator} will be wrapped in this exception.
* @author Chandrakant Talele
*/
public class LocatorException extends RuntimeException {
    private static final long serialVersionUID = 8682893414728832937L;
    /**
     * Wraps a failure raised inside the service locator.
     *
     * @param message Message to set
     * @param cause Cause for exception; may be null
     * @param errorCode application error code passed to the base exception
     */
    public LocatorException(String message, Throwable cause, String errorCode) {
        super(message, (Exception) cause, errorCode);
        // BUG FIX: guard against a null cause, which previously threw a
        // NullPointerException from inside the constructor.
        // NOTE(review): printing the stack trace in a constructor is unusual;
        // kept for behavioral compatibility, consider logging instead.
        if (cause != null) {
            cause.printStackTrace();
        }
    }
}
|
package com.splunk.shep.archiver.model;
import static org.testng.AssertJUnit.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.commons.io.FileUtils;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;
import com.splunk.shep.archiver.archive.BucketFormat;
import com.splunk.shep.testutil.UtilsFile;
@Test(groups = { "fast" })
public class BucketTest {

    // Root of the temporary directory tree built per test; removed in tearDown.
    File rootTestDirectory;
    String index = "index";

    @AfterMethod(groups = { "fast" })
    public void tearDown() throws IOException {
        if (rootTestDirectory != null) {
            FileUtils.deleteDirectory(rootTestDirectory);
        }
    }

    public void constructor_takingAbsolutePathToABucket_setIndex()
            throws IOException {
        Bucket bucket = Bucket.createWithAbsolutePath(getBucketPathWithIndex());
        assertEquals(index, bucket.getIndex());
    }

    private String getBucketPathWithIndex() {
        File bucketDirectory = getBucketDirectoryWithIndex(index);
        return bucketDirectory.getAbsolutePath();
    }

    private File getBucketDirectoryWithIndex(String index) {
        // Layout mirrors a Splunk index: <tmp>/<index>/db/<bucketName>
        rootTestDirectory = UtilsFile.createTempDirectory();
        File indexDirectory = UtilsFile.createDirectoryInParent(
                rootTestDirectory, index);
        File dbDirectory = UtilsFile.createDirectoryInParent(indexDirectory, "db");
        return UtilsFile.createDirectoryInParent(dbDirectory, getBucketName());
    }

    private String getBucketName() {
        return "db_1326857236_1300677707_0";
    }

    public void constructor_absolutePathToBucketEndingWithSlash_setIndex()
            throws IOException {
        String pathWithTrailingSlash = getBucketPathWithIndex() + "/";
        Bucket bucket = Bucket.createWithAbsolutePath(pathWithTrailingSlash);
        assertEquals(index, bucket.getIndex());
    }

    public void createWithAbsolutePath_takingStringToAnExistingDirectory_notNullBucket()
            throws IOException {
        String existingDirectory = UtilsFile.createTempDirectory()
                .getAbsolutePath();
        assertNotNull(Bucket.createWithAbsolutePath(existingDirectory));
    }

    @Test(expectedExceptions = { FileNotFoundException.class })
    public void createWithAbsolutePath_takingStringToNonExistingDirectory_throwFileNotFoundException()
            throws IOException {
        File missing = new File("does-not-exist");
        assertTrue(!missing.exists());
        Bucket.createWithAbsolutePath(missing.getAbsolutePath());
    }

    @Test(expectedExceptions = { FileNotDirectoryException.class })
    public void createWithAbsolutePath_wherePathIsAFileNotADirectory_throwFileNotDirectoryException()
            throws IOException {
        File regularFile = UtilsFile.createTestFile();
        assertTrue(regularFile.isFile());
        Bucket.createWithAbsolutePath(regularFile.getAbsolutePath());
    }

    public void createWithAbsolutePath_rawdataDirectoryExistsInsideBucket_getFormatReturnsSplunkBucket()
            throws IOException {
        File bucketDirectory = getBucketDirectoryWithIndex(index);
        File rawdataDirectory = UtilsFile.createDirectoryInParent(
                bucketDirectory, "rawdata");
        assertTrue(rawdataDirectory.exists());
        Bucket bucket = Bucket.createWithAbsolutePath(bucketDirectory
                .getAbsolutePath());
        assertEquals(bucket.getFormat(), BucketFormat.SPLUNK_BUCKET);
    }

    /**
     * Until We've implemented more bucket formats, this is what happens.<br/>
     * This test should probably be removed when we get more formats.
     */
    public void createWithAbsolutePath_rawdataNotInBucket_bucketFormatIsUnknown()
            throws IOException {
        Bucket bucket = Bucket.createWithAbsolutePath(getBucketPathWithIndex());
        assertEquals(BucketFormat.UNKNOWN, bucket.getFormat());
    }

    public void createWithAbsolutePath_validBucketPathInput_bucketNameIsLastDirectoryInPath()
            throws IOException {
        Bucket bucket = Bucket.createWithAbsolutePath(getBucketPathWithIndex());
        assertEquals(getBucketName(), bucket.getName());
    }

    public void BucketTest_getBucketPathWithIndex_withNonEmptyIndex_endsWithExpectedPathEnding() {
        String expectedBucketPathEnding = index
                + "/db/db_1326857236_1300677707_0";
        assertTrue(getBucketPathWithIndex().endsWith(expectedBucketPathEnding));
    }
}
|
package som.primitives.arithmetic;
import java.math.BigInteger;
import com.oracle.truffle.api.ExactMath;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.utilities.BranchProfile;
/**
 * Modulo primitive with Truffle specializations for long, BigInteger and
 * double operands, plus mixed-type bridges that delegate to the former.
 */
public abstract class ModuloPrim extends ArithmeticPrim {
    // Records whether the negative-divisor correction branch was ever taken,
    // letting Truffle skip compiling it until first use.
    private BranchProfile negativeRightOperand = new BranchProfile();
    /**
     * Long modulo, adjusted so that when left > 0 and right < 0 the result
     * carries the divisor's sign (result + right).
     * NOTE(review): the mirrored case (left < 0, right > 0) is NOT corrected
     * here, whereas the BigInteger specialization uses BigInteger.mod, which
     * is always non-negative — confirm this asymmetry is intended.
     * rewriteOn: an overflow in addExact rewrites to a wider specialization.
     */
    @Specialization(order = 1, rewriteOn = ArithmeticException.class)
    public final long doLong(final long left, final long right) {
        long l = left;
        long r = right;
        long result = l % r;
        if (l > 0 && r < 0) {
            negativeRightOperand.enter();
            result = ExactMath.addExact(result, r);
        }
        return result;
    }
    /** BigInteger modulo via BigInteger.mod; narrowed back to long when it fits. */
    @Specialization(order = 2)
    public final Object doBigInteger(final BigInteger left, final BigInteger right) {
        return reduceToIntIfPossible(left.mod(right));
    }
    /** Double modulo: plain IEEE remainder semantics of the % operator. */
    @Specialization(order = 3)
    public final double doDouble(final double left, final double right) {
        return left % right;
    }
    // Mixed-type bridges: promote the long operand and delegate.
    @Specialization(order = 10)
    public final Object doBigInteger(final BigInteger left, final long right) {
        return doBigInteger(left, BigInteger.valueOf(right));
    }
    @Specialization(order = 11)
    public final Object doLong(final long left, final BigInteger right) {
        return doBigInteger(BigInteger.valueOf(left), right);
    }
    @Specialization(order = 12)
    public final double doLong(final long left, final double right) {
        return doDouble(left, right);
    }
    @Specialization(order = 13)
    public final double doDouble(final double left, final long right) {
        return doDouble(left, right);
    }
}
|
package swift.application.social;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import swift.crdt.CRDTIdentifier;
import swift.crdt.RegisterTxnLocal;
import swift.crdt.RegisterVersioned;
import swift.crdt.SetIds;
import swift.crdt.SetMsg;
import swift.crdt.SetTxnLocalId;
import swift.crdt.SetTxnLocalMsg;
import swift.crdt.interfaces.CachePolicy;
import swift.crdt.interfaces.IsolationLevel;
import swift.crdt.interfaces.Swift;
import swift.crdt.interfaces.TxnHandle;
import swift.exceptions.NetworkException;
import swift.exceptions.NoSuchObjectException;
import swift.exceptions.VersionNotFoundException;
import swift.exceptions.WrongTypeException;
// implements the social network functionality
// see wsocial_srv.h
/**
 * Social-network operations (login, registration, messaging, friendship)
 * implemented on top of SwiftCloud CRDT transactions.
 * See wsocial_srv.h for the original functionality.
 */
public class SwiftSocial {
    private static Logger logger = Logger.getLogger("swift.social");
    // BUG FIX: was an instance initializer, so the (static) logger level was
    // only configured when an instance happened to be constructed; configure
    // it once at class load instead.
    static {
        logger.setLevel(Level.INFO);
    }

    // FIXME Add sessions? Local login possible? Cookies?
    private User currentUser;
    private Swift server;

    public SwiftSocial(Swift clientServer) {
        server = clientServer;
    }

    // FIXME Return type integer encoding error msg?
    boolean login(String loginName, String passwd) {
        logger.info("Got login request from user " + loginName);
        // Check if user is already logged in
        if (currentUser != null) {
            // BUG FIX: compare against the logged-in user's login name; the
            // previous code compared a String to a User object, always false.
            if (loginName.equals(currentUser.loginName)) {
                logger.info(loginName + " is already logged in");
                return true;
            } else {
                logger.info("Need to log out user " + currentUser.loginName + " first!");
                return false;
            }
        }
        try {
            // Check if user is known at all
            // FIXME Is login possible in offline mode?
            TxnHandle txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.STRICTLY_MOST_RECENT, true);
            @SuppressWarnings("unchecked")
            User user = (User) (txn.get(NamingScheme.forUser(loginName), false, RegisterVersioned.class)).getValue();
            // Check password
            // FIXME We actually need an external authentification mechanism, as
            // clients cannot be trusted.
            // In Walter, authentification is done on server side, within the
            // data center. Moving password (even if hashed) to the client is a
            // security breach.
            if (user != null) {
                if (user.password.equals(passwd)) {
                    currentUser = user;
                    logger.info(loginName + " successfully logged in");
                    txn.commitAsync(null);
                    return true;
                } else {
                    logger.info("Wrong password for " + loginName);
                }
            } else {
                logger.info("User has not been registered " + loginName);
            }
        } catch (NetworkException e) {
            e.printStackTrace();
        } catch (WrongTypeException e) {
            // should not happen
            e.printStackTrace();
        } catch (NoSuchObjectException e) {
            logger.info("User " + loginName + " is not known");
        } catch (VersionNotFoundException e) {
            // should not happen
            e.printStackTrace();
        }
        return false;
    }

    void logout(String loginName) {
        currentUser = null;
        // FIXME End session? handle cookies?
        logger.info(loginName + " successfully logged out");
    }

    // FIXME Return error code?
    /** Creates the user register plus empty message/event/friend sets. */
    @SuppressWarnings("unchecked")
    void registerUser(String loginName, String passwd, String fullName, long birthday) {
        logger.info("Got registration request for " + loginName);
        // FIXME How do we guarantee unique login names?
        // WalterSocial suggests using dedicated (non-replicated) login server.
        TxnHandle txn = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.STRICTLY_MOST_RECENT, false);
            RegisterTxnLocal<User> reg = (RegisterTxnLocal<User>) txn.get(NamingScheme.forUser(loginName), true,
                    RegisterVersioned.class);
            // Touch (create-if-absent) the per-user collections.
            txn.get(NamingScheme.forMessages(loginName), true, SetMsg.class);
            txn.get(NamingScheme.forEvents(loginName), true, SetMsg.class);
            txn.get(NamingScheme.forFriends(loginName), true, SetIds.class);
            txn.get(NamingScheme.forInFriendReq(loginName), true, SetIds.class);
            txn.get(NamingScheme.forOutFriendReq(loginName), true, SetIds.class);
            User newUser = new User(loginName, passwd, fullName, birthday, true);
            reg.set(newUser);
            logger.info("Registered user: " + newUser);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
    }

    /** Updates the current user's profile and writes it back to its register. */
    @SuppressWarnings("unchecked")
    void updateUser(boolean status, String fullName, long birthday, int maritalStatus) {
        logger.info("Update user data for " + this.currentUser.loginName);
        this.currentUser.active = status;
        this.currentUser.fullName = fullName;
        this.currentUser.birthday = birthday;
        this.currentUser.maritalStatus = maritalStatus;
        TxnHandle txn = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.CACHED, false);
            RegisterTxnLocal<User> reg = (RegisterTxnLocal<User>) txn.get(
                    NamingScheme.forUser(this.currentUser.loginName), true, RegisterVersioned.class);
            reg.set(currentUser);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
    }

    /**
     * Reads a user's profile and fills the given collections with their
     * messages and events.
     *
     * @return the user, or null if the read failed
     */
    @SuppressWarnings("unchecked")
    User read(final String name, final Set<Message> messages, final Set<Message> events) {
        logger.info("Get site report for " + name);
        TxnHandle txn = null;
        User user = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.CACHED, true);
            RegisterTxnLocal<User> reg = (RegisterTxnLocal<User>) txn.get(NamingScheme.forUser(name), false,
                    RegisterVersioned.class);
            user = reg.getValue();
            messages.addAll(((SetTxnLocalMsg) txn.get(NamingScheme.forMessages(name), false, SetMsg.class)).getValue());
            events.addAll(((SetTxnLocalMsg) txn.get(NamingScheme.forEvents(name), false, SetMsg.class)).getValue());
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
        return user;
    }

    // FIXME return error code?
    void postMessage(String receiverName, String msg, long date) {
        logger.info("Post status msg from " + this.currentUser.loginName + " for " + receiverName);
        Message newMsg = new Message(msg, this.currentUser.loginName, date);
        TxnHandle txn = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.CACHED, false);
            SetTxnLocalMsg messages = (SetTxnLocalMsg) txn.get(NamingScheme.forMessages(receiverName), false,
                    SetMsg.class);
            messages.insert(newMsg);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
    }

    /**
     * Resolves a pending friend request: removes it from both request sets
     * and, if accepted, adds each user to the other's friend set.
     */
    void answerFriendRequest(String requester, boolean accept) {
        logger.info("Answered friend request from " + this.currentUser.loginName + " for " + requester);
        TxnHandle txn = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.CACHED, false);
            SetTxnLocalId inFriendReq = (SetTxnLocalId) txn.get(
                    NamingScheme.forInFriendReq(this.currentUser.loginName), false, SetIds.class);
            inFriendReq.remove(NamingScheme.forUser(requester));
            SetTxnLocalId outFriendReq = (SetTxnLocalId) txn.get(NamingScheme.forOutFriendReq(requester), false,
                    SetIds.class);
            outFriendReq.remove(NamingScheme.forUser(this.currentUser.loginName));
            if (accept) {
                SetTxnLocalId friends = (SetTxnLocalId) txn.get(NamingScheme.forFriends(this.currentUser.loginName),
                        false, SetIds.class);
                friends.insert(NamingScheme.forUser(requester));
                SetTxnLocalId requesterFriends = (SetTxnLocalId) txn.get(NamingScheme.forFriends(requester), false,
                        SetIds.class);
                requesterFriends.insert(NamingScheme.forUser(this.currentUser.loginName));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
    }

    /**
     * Records a friend request: the current user is added to the receiver's
     * incoming set and the receiver to the current user's outgoing set
     * (the inverse of what answerFriendRequest undoes).
     */
    void sendFriendRequest(String receiverName) {
        // BUG FIX: log message previously read "from to <receiver>".
        logger.info("Sending friend request from " + this.currentUser.loginName + " to " + receiverName);
        TxnHandle txn = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.CACHED, false);
            SetTxnLocalId inFriendReq = (SetTxnLocalId) txn.get(NamingScheme.forInFriendReq(receiverName), false,
                    SetIds.class);
            inFriendReq.insert(NamingScheme.forUser(this.currentUser.loginName));
            SetTxnLocalId outFriendReq = (SetTxnLocalId) txn.get(
                    NamingScheme.forOutFriendReq(this.currentUser.loginName), false, SetIds.class);
            // BUG FIX: previously this REMOVED the current user from their own
            // outgoing set; it must record the receiver as a pending request,
            // which answerFriendRequest later removes.
            outFriendReq.insert(NamingScheme.forUser(receiverName));
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
    }

    /** Returns the friends of the given user as (fullName, id) pairs. */
    @SuppressWarnings("unchecked")
    Set<Friend> readFriendList(String name) {
        logger.info("Get friends of " + name);
        Set<Friend> friends = new HashSet<Friend>();
        TxnHandle txn = null;
        try {
            txn = server.beginTxn(IsolationLevel.SNAPSHOT_ISOLATION, CachePolicy.CACHED, true);
            Set<CRDTIdentifier> friendIds = ((SetTxnLocalId) txn
                    .get(NamingScheme.forFriends(name), false, SetIds.class)).getValue();
            for (CRDTIdentifier f : friendIds) {
                // BUG FIX: look up the FRIEND's register (f); the previous code
                // re-read NamingScheme.forUser(name), so every Friend carried
                // the owner's full name instead of the friend's.
                User u = ((RegisterTxnLocal<User>) txn.get(f, false, RegisterVersioned.class))
                        .getValue();
                friends.add(new Friend(u.fullName, f));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (txn != null) {
                txn.commitAsync(null);
            }
        }
        return friends;
    }
}
|
package dom.ids;
import java.io.PrintWriter;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import dom.util.Assertion;
import dom.ParserWrapper;
/**
* A simple program to test Document.getElementById() and the management
* of ID attributes. Originally based on dom.Counter.
* CAVEAT: Although any document can be given in argument, the test assumes
* it is given personal.xml and relies on that to function properly.
*
* @author Andy Clark, IBM
* @author Arnaud Le Hors, IBM
*
* @version $Id$
*/
public class Test {
// Constants
// feature ids
protected static final String NAMESPACES_FEATURE_ID =
"http://xml.org/sax/features/namespaces";
protected static final String VALIDATION_FEATURE_ID =
"http://xml.org/sax/features/validation";
protected static final String SCHEMA_VALIDATION_FEATURE_ID =
"http://apache.org/xml/features/validation/schema";
protected static final String SCHEMA_FULL_CHECKING_FEATURE_ID =
"http://apache.org/xml/features/validation/schema-full-checking";
protected static final String DEFERRED_DOM_FEATURE_ID =
"http://apache.org/xml/features/dom/defer-node-expansion";
// default settings
protected static final String DEFAULT_PARSER_NAME = "dom.wrappers.Xerces";
protected static final boolean DEFAULT_NAMESPACES = true;
protected static final boolean DEFAULT_VALIDATION = false;
protected static final boolean DEFAULT_SCHEMA_VALIDATION = false;
protected static final boolean DEFAULT_SCHEMA_FULL_CHECKING = false;
// Xerces specific feature
protected static final boolean DEFAULT_DEFERRED_DOM = true;
// Public methods
/** Performs the actual test. */
public void test(Document doc) {
System.out.println("DOM IDs Test...");
Element el = doc.getElementById("one.worker");
Assertion.assert(el != null);
Element el2 = doc.getElementById("one.worker there");
Assertion.assert(el2 == null);
if (el != null) {
Assertion.equals(el.getAttribute("id"), "one.worker");
el.setAttribute("id", "my.worker");
el2 = doc.getElementById("my.worker");
Assertion.assert(el2 == el);
el2 = doc.getElementById("one.worker");
Assertion.assert(el2 == null);
el.removeAttribute("id");
el2 = doc.getElementById("my.worker");
Assertion.assert(el2 == null);
}
System.out.println("done.");
} // test(Document)
// MAIN
/**
 * Main program entry point.
 * <p>
 * Walks the argument list once: option arguments configure the parser to
 * be used, and every non-option argument is parsed as an XML file whose
 * document is handed to {@link #test}.
 *
 * @param argv options (-p name, -n/-N, -v/-V, -s/-S, -f/-F, -d/-D, -h)
 *             interleaved with the XML file names to test
 */
public static void main(String argv[]) {

    // is there anything to do?
    if (argv.length == 0) {
        printUsage();
        System.exit(1);
    }

    // variables
    Test test = new Test();
    ParserWrapper parser = null;
    boolean namespaces = DEFAULT_NAMESPACES;
    boolean validation = DEFAULT_VALIDATION;
    boolean schemaValidation = DEFAULT_SCHEMA_VALIDATION;
    boolean schemaFullChecking = DEFAULT_SCHEMA_FULL_CHECKING;
    boolean deferredDom = DEFAULT_DEFERRED_DOM;

    // process arguments
    for (int i = 0; i < argv.length; i++) {
        String arg = argv[i];
        if (arg.startsWith("-")) {
            String option = arg.substring(1);
            if (option.equals("p")) {
                // get parser name
                if (++i == argv.length) {
                    System.err.println("error: Missing argument to -p"
                                       + " option.");
                    // BUG FIX: without this continue, the argv[i] access
                    // below threw ArrayIndexOutOfBoundsException whenever
                    // -p was the last argument.
                    continue;
                }
                String parserName = argv[i];

                // create parser
                try {
                    parser = (ParserWrapper)
                        Class.forName(parserName).newInstance();
                }
                catch (Exception e) {
                    parser = null;
                    System.err.println("error: Unable to instantiate "
                                       + "parser (" + parserName + ")");
                }
                continue;
            }
            // For each feature flag, the lower-case letter turns the
            // feature on and the upper-case letter turns it off
            // (e.g. -v enables validation, -V disables it).
            if (option.equalsIgnoreCase("n")) {
                namespaces = option.equals("n");
                continue;
            }
            if (option.equalsIgnoreCase("v")) {
                validation = option.equals("v");
                continue;
            }
            if (option.equalsIgnoreCase("s")) {
                schemaValidation = option.equals("s");
                continue;
            }
            if (option.equalsIgnoreCase("f")) {
                schemaFullChecking = option.equals("f");
                continue;
            }
            if (option.equalsIgnoreCase("d")) {
                deferredDom = option.equals("d");
                continue;
            }
            if (option.equals("h")) {
                printUsage();
                continue;
            }
            // unknown options fall through and are silently ignored
        }

        // use default parser?
        if (parser == null) {

            // create parser
            try {
                parser = (ParserWrapper)
                    Class.forName(DEFAULT_PARSER_NAME).newInstance();
            }
            catch (Exception e) {
                System.err.println("error: Unable to instantiate parser ("
                                   + DEFAULT_PARSER_NAME + ")");
                continue;
            }
        }

        // set parser features; an unsupported feature only merits a
        // warning, not an abort.
        try {
            parser.setFeature(NAMESPACES_FEATURE_ID, namespaces);
        }
        catch (SAXException e) {
            System.err.println("warning: Parser does not support feature ("
                               + NAMESPACES_FEATURE_ID + ")");
        }
        try {
            parser.setFeature(VALIDATION_FEATURE_ID, validation);
        }
        catch (SAXException e) {
            System.err.println("warning: Parser does not support feature ("
                               + VALIDATION_FEATURE_ID + ")");
        }
        try {
            parser.setFeature(SCHEMA_VALIDATION_FEATURE_ID,
                              schemaValidation);
        }
        catch (SAXException e) {
            System.err.println("warning: Parser does not support feature ("
                               + SCHEMA_VALIDATION_FEATURE_ID + ")");
        }
        try {
            parser.setFeature(SCHEMA_FULL_CHECKING_FEATURE_ID,
                              schemaFullChecking);
        }
        catch (SAXException e) {
            System.err.println("warning: Parser does not support feature ("
                               + SCHEMA_FULL_CHECKING_FEATURE_ID + ")");
        }

        // deferred node expansion is a Xerces-only feature
        if (parser instanceof dom.wrappers.Xerces) {
            try {
                parser.setFeature(DEFERRED_DOM_FEATURE_ID,
                                  deferredDom);
            }
            catch (SAXException e) {
                System.err.println("warning: Parser does not support " +
                                   "feature (" +
                                   DEFERRED_DOM_FEATURE_ID + ")");
            }
        }

        // parse file and run the ID tests on the resulting document
        try {
            Document document = parser.parse(arg);
            test.test(document);
        }
        catch (SAXParseException e) {
            // deliberately ignored -- presumably already reported by the
            // parser's error handler (TODO: confirm)
        }
        catch (Exception e) {
            System.err.println("error: Parse error occurred - " +
                               e.getMessage());
            // For SAXExceptions, prefer the stack trace of the wrapped
            // exception when one is present.
            Exception se = e;
            if (e instanceof SAXException) {
                se = ((SAXException)e).getException();
            }
            if (se != null)
                se.printStackTrace(System.err);
            else
                e.printStackTrace(System.err);
        }
    }

} // main(String[])
// Private static methods
/** Prints the command-line usage summary to System.err. */
private static void printUsage() {
    // Table-driven: the output is identical, line for line, to printing
    // each string individually.
    String[] lines = {
        "usage: java dom.ids.Test (options) ...data/personal.xml",
        "",
        "options:",
        " -p name Select parser by name.",
        " -d | -D Turn on/off (Xerces) deferred DOM.",
        " -n | -N Turn on/off namespace processing.",
        " -v | -V Turn on/off validation.",
        " -s | -S Turn on/off Schema validation support.",
        " NOTE: Not supported by all parsers.",
        " -f | -F Turn on/off Schema full checking.",
        " NOTE: Requires use of -s and not supported by all parsers.",
        " -h This help screen.",
        "",
        "defaults:",
        " Parser: " + DEFAULT_PARSER_NAME,
        " Xerces Deferred DOM: " + (DEFAULT_DEFERRED_DOM ? "on" : "off"),
        " Namespaces: " + (DEFAULT_NAMESPACES ? "on" : "off"),
        " Validation: " + (DEFAULT_VALIDATION ? "on" : "off"),
        " Schema: " + (DEFAULT_SCHEMA_VALIDATION ? "on" : "off"),
        " Schema full checking: " + (DEFAULT_SCHEMA_FULL_CHECKING ? "on" : "off"),
    };
    for (int i = 0; i < lines.length; i++) {
        System.err.println(lines[i]);
    }
} // printUsage()
} // class Test
|
// Various DOM tests.
// Contents include
// 1. Basic functionality for DOMString
// 2. Regression tests for bugs fixed.
// All individual tests are wrapped in a memory leak checker.
// This is NOT a complete test of DOM functionality.
package dom.mem;
import org.w3c.dom.*;
import org.apache.xerces.dom.DocumentImpl;
import org.apache.xerces.dom.DOMImplementationImpl;
import org.apache.xerces.dom.NotationImpl;
import java.lang.reflect.*;
import dom.util.Assertion;
public class Test {
/**
 * Reflectively invokes {@code methodName} on {@code node} and checks that
 * the call throws a {@link DOMException} carrying the expected code.
 *
 * @param node            target object of the reflective call
 * @param methodName      name of the method to invoke
 * @param methodSignature parameter types used to look the method up
 * @param parameters      arguments to pass to the invocation
 * @param code            the DOMException code the call must raise
 * @return true iff the call threw a DOMException with {@code code}
 *
 * @author Philip W. Davis
 */
public static boolean DOMExceptionsTest(Object node,
                                        String methodName,
                                        Class[] methodSignature,
                                        Object[] parameters,
                                        short code)
{
    boolean asExpected = false;

    try {
        Method method = node.getClass().getMethod(methodName, methodSignature);
        method.invoke(node, parameters);
        // BUG FIX: the call completed without any exception, so report the
        // missing DOMException here.  The original printed this message
        // only when a *wrong* exception had been thrown, and stayed
        // silent in the no-exception case.
        System.out.println("Expected DOMException (" +
                           code + ") not thrown");
    } catch (InvocationTargetException exc) {
        // The invoked method threw; unwrap and classify it.
        Throwable realE = exc.getTargetException();
        if (realE instanceof DOMException) {
            asExpected = (((DOMException) realE).code == code);
            if (!asExpected)
                System.out.println("Wrong DOMException(" +
                                   ((DOMException) realE).code + ")");
        } else {
            System.out.println("Wrong Exception (" + code + ")");
        }
    } catch (Exception exc) {
        // Reflection failure: no such method, inaccessible, bad args, ...
        System.out.println("test invocation failure (" + exc + ")");
    }
    return (asExpected);
}
public static void main(String argv[])
{
System.out.print("DOM Memory Test.\n");
// Test Doc01 Create a new empty document
{
Document doc;
doc = new DocumentImpl();
}
// Test Doc02 Create one of each kind of node using the
// document createXXX methods.
// Watch for memory leaks.
{
// Do all operations in a preconditioning step, to force the
// creation of implementation objects that are set up on first use.
// Don't watch for leaks in this block (no / )
Document doc = new DocumentImpl();
Element el = doc.createElement("Doc02Element");
DocumentFragment frag = doc.createDocumentFragment ();
Text text = doc.createTextNode("Doc02TextNode");
Comment comment = doc.createComment("Doc02Comment");
CDATASection cdataSec = doc.createCDATASection("Doc02CDataSection");
DocumentType docType = doc.getImplementation().createDocumentType("Doc02DocumentType", null, null);
Notation notation = ((DocumentImpl) doc).createNotation("Doc02Notation");
ProcessingInstruction pi = doc.createProcessingInstruction("Doc02PITarget",
"Doc02PIData");
NodeList nodeList = doc.getElementsByTagName("*");
}
{
Document doc = new DocumentImpl();
Element el = doc.createElement("Doc02Element");
}
{
Document doc = new DocumentImpl();
DocumentFragment frag = doc.createDocumentFragment ();
};
{
Document doc = new DocumentImpl();
Element el = doc.createElement("Doc02Element");
}
{
Document doc = new DocumentImpl();
Text text = doc.createTextNode("Doc02TextNode");
}
{
Document doc = new DocumentImpl();
Comment comment = doc.createComment("Doc02Comment");
}
{
Document doc = new DocumentImpl();
CDATASection cdataSec = doc.createCDATASection("Doc02CDataSection");
}
{
Document doc = new DocumentImpl();
DocumentType docType = doc.getImplementation().createDocumentType("Doc02DocumentType", null, null);
}
{
Document doc = new DocumentImpl();
Notation notation = ((DocumentImpl)doc).createNotation("Doc02Notation");
}
{
Document doc = new DocumentImpl();
ProcessingInstruction pi = doc.createProcessingInstruction("Doc02PITarget",
"Doc02PIData");
}
{
Document doc = new DocumentImpl();
Attr attribute = doc.createAttribute("Doc02Attribute");
}
{
Document doc = new DocumentImpl();
EntityReference er = doc.createEntityReference("Doc02EntityReference");
}
{
Document doc = new DocumentImpl();
NodeList nodeList = doc.getElementsByTagName("*");
}
// Doc03 - Create a small document tree
{
Document doc = new DocumentImpl();
Element rootEl = doc.createElement("Doc03RootElement");
doc.appendChild(rootEl);
Text textNode = doc.createTextNode("Doc03 text stuff");
rootEl.appendChild(textNode);
NodeList nodeList = doc.getElementsByTagName("*");
};
// Attr01
{
Document doc = new DocumentImpl();
Element rootEl = doc.createElement("RootElement");
doc.appendChild(rootEl);
{
Attr attr01 = doc.createAttribute("Attr01");
rootEl.setAttributeNode(attr01);
}
{
Attr attr02 = doc.createAttribute("Attr01");
rootEl.setAttributeNode(attr02);
}
};
// Attr02
{
Document doc = new DocumentImpl();
Element rootEl = doc.createElement("RootElement");
doc.appendChild(rootEl);
Attr attr01 = doc.createAttribute("Attr02");
rootEl.setAttributeNode(attr01);
Attr attr02 = doc.createAttribute("Attr02");
rootEl.setAttributeNode(attr02);
}
// Attr03
{
Document doc = new DocumentImpl();
Element rootEl = doc.createElement("RootElement");
doc.appendChild(rootEl);
Attr attr01 = doc.createAttribute("Attr03");
rootEl.setAttributeNode(attr01);
attr01.setValue("Attr03Value1");
attr01.setValue("Attr03Value2");
}
// Text01
{
Document doc = new DocumentImpl();
Element rootEl = doc.createElement("RootElement");
doc.appendChild(rootEl);
Text txt1 = doc.createTextNode("Hello Goodbye");
rootEl.appendChild(txt1);
txt1.splitText(6);
rootEl.normalize();
}
// Notation01
{
/*
DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
DocumentType dt =
impl.createDocumentType("DocType_for_Notation01", null, null, null);
doc.appendChild(dt);
NamedNodeMap notationMap = dt.getNotations();
Notation nt1 = ((DocumentImpl) doc).createNotation("Notation01");
((NotationImpl) nt1).setPublicId("Notation01PublicId");
notationMap.setNamedItem (nt1);
Notation nt2 = (Notation)notationMap.getNamedItem("Notation01");
Assertion.assert(nt1==nt2);
nt2 = new NotationImpl((DocumentImpl)doc, null);
nt1 = null;
nt2 = (Notation)notationMap.getNamedItem("Notation01");
*/
}
// NamedNodeMap01 - comparison operators.
{
NamedNodeMap nnm = null;
Assertion.assert(nnm == null);
Document doc = new DocumentImpl();
nnm = doc.getAttributes(); // Should be null, because node type
// is not Element.
Assertion.assert(nnm == null);
Assertion.assert(!(nnm != null));
Element el = doc.createElement("NamedNodeMap01");
NamedNodeMap nnm2 = el.getAttributes(); // Should be an empty, but non-null map.
Assertion.assert(nnm2 != null);
Assertion.assert(nnm != nnm2);
nnm = nnm2;
Assertion.assert(nnm == nnm2);
}
// importNode quick test
{
Document doc1 = new DocumentImpl();
Document doc2 = new DocumentImpl();
Element el1 = doc1.createElement("abc");
doc1.appendChild(el1);
Assertion.assert(el1.getParentNode() != null);
el1.setAttribute("foo", "foovalue");
Node el2 = doc2.importNode(el1, true);
Assertion.assert(el2.getParentNode() == null);
String tagName = el2.getNodeName();
Assertion.equals(tagName, "abc");
Assertion.assert(el2.getOwnerDocument() == doc2);
Assertion.equals(((Element) el2).getAttribute("foo"), "foovalue");
Assertion.assert(doc1 != doc2);
}
// getLength() tests. Both Node CharacterData and NodeList implement
// getLength(). Early versions of the DOM had a clash
// between the two, originating in the implementation class
// hirearchy, which has NodeList as a (distant) base class
// of CharacterData. This is a regression test to verify
// that the problem stays fixed.
{
Document doc = new DocumentImpl();
Text tx = doc.createTextNode("Hello");
Element el = doc.createElement("abc");
el.appendChild(tx);
int textLength = tx.getLength();
Assertion.assert(textLength == 5);
NodeList nl = tx.getChildNodes();
int nodeListLen = nl.getLength();
Assertion.assert(nodeListLen == 0);
nl = el.getChildNodes();
nodeListLen = nl.getLength();
Assertion.assert(nodeListLen == 1);
}
// NodeList - comparison operators, basic operation.
{
NodeList nl = null;
NodeList nl2 = null;
Assertion.assert(nl == null);
Assertion.assert(!(nl != null));
Assertion.assert(nl == nl2);
Document doc = new DocumentImpl();
nl = doc.getChildNodes(); // Should be non-null, but empty
Assertion.assert(nl != null);
int len = nl.getLength();
Assertion.assert(len == 0);
Element el = doc.createElement("NodeList01");
doc.appendChild(el);
len = nl.getLength();
Assertion.assert(len == 1);
Assertion.assert(nl != nl2);
nl2 = nl;
Assertion.assert(nl == nl2);
}
// Name validity checking.
{
Document doc = new DocumentImpl();
Assertion.assert(DOMExceptionsTest(doc, "createElement",
new Class[]{String.class},
new Object[]{"!@@ bad element name"},
DOMException.INVALID_CHARACTER_ERR));
}
// Assignment ops return value
{
Document doc = new DocumentImpl();
Element el = doc.createElement("NodeList01");
doc.appendChild(el);
Element n1, n2, n3;
n1 = n2 = n3 = el;
Assertion.assert(n1 == n2);
Assertion.assert(n1 == n3);
Assertion.assert(n1 == el);
Assertion.assert(n1 != null);
n1 = n2 = n3 = null;
Assertion.assert(n1 == null);
}
// Cloning of a node with attributes. Regression test for a ref counting
// bug in attributes of cloned nodes that occured when the "owned" flag
// was not set in the clone.
{
Document doc = new DocumentImpl();
Element root = doc.createElement("CTestRoot");
root.setAttribute("CTestAttr", "CTestAttrValue");
String s = root.getAttribute("CTestAttr");
Assertion.equals(s, "CTestAttrValue");
Element cloned = (Element)root.cloneNode(true);
Attr a = cloned.getAttributeNode("CTestAttr");
Assertion.assert(a != null);
s = a.getValue();
Assertion.equals(s, "CTestAttrValue");
a = null;
a = cloned.getAttributeNode("CTestAttr");
Assertion.assert(a != null);
s = a.getValue();
Assertion.equals(s, "CTestAttrValue");
}
// DOM Level 2 tests. These should be split out as a separate test.
// hasFeature. The set of supported options tested here is for Xerces 1.1
{
DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
Assertion.assert(impl.hasFeature("XML", "2.0") == true);
Assertion.assert(impl.hasFeature("XML", null) == true);
// We also support 1.0
Assertion.assert(impl.hasFeature("XML", "1.0") == true);
Assertion.assert(impl.hasFeature("XML", "3.0") == false);
Assertion.assert(impl.hasFeature("Traversal", null) == true);
Assertion.assert(impl.hasFeature("HTML", null) == false);
Assertion.assert(impl.hasFeature("Views", null) == false);
Assertion.assert(impl.hasFeature("StyleSheets", null) == false);
Assertion.assert(impl.hasFeature("CSS", null) == false);
Assertion.assert(impl.hasFeature("CSS2", null) == false);
Assertion.assert(impl.hasFeature("Events", null) == true);
Assertion.assert(impl.hasFeature("UIEvents", null) == false);
Assertion.assert(impl.hasFeature("MouseEvents", null) == false);
Assertion.assert(impl.hasFeature("MutationEvents", null) == true);
Assertion.assert(impl.hasFeature("HTMLEvents", null) == false);
Assertion.assert(impl.hasFeature("Range", null) == false);
}
// CreateDocumentType
{
DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
String qName = "foo:docName";
String pubId = "pubId";
String sysId = "http://sysId";
DocumentType dt = impl.createDocumentType(qName, pubId, sysId);
Assertion.assert(dt != null);
Assertion.assert(dt.getNodeType() == Node.DOCUMENT_TYPE_NODE);
Assertion.equals(dt.getNodeName(), qName);
Assertion.assert(dt.getNamespaceURI() == null);
Assertion.assert(dt.getPrefix() == null);
Assertion.assert(dt.getLocalName() == null);
Assertion.equals(dt.getPublicId(), pubId);
Assertion.equals(dt.getSystemId(), sysId);
Assertion.assert(dt.getInternalSubset() == null);
Assertion.assert(dt.getOwnerDocument() == null);
NamedNodeMap nnm = dt.getEntities();
Assertion.assert(nnm.getLength() == 0);
nnm = dt.getNotations();
Assertion.assert(nnm.getLength() == 0);
// Qualified name without prefix should also work.
qName = "docName";
dt = impl.createDocumentType(qName, pubId, sysId);
Assertion.assert(dt != null);
Assertion.assert(dt.getNodeType() == Node.DOCUMENT_TYPE_NODE);
Assertion.equals(dt.getNodeName(), qName);
Assertion.assert(dt.getNamespaceURI() == null);
Assertion.assert(dt.getPrefix() == null);
Assertion.assert(dt.getLocalName() == null);
Assertion.equals(dt.getPublicId(), pubId);
Assertion.equals(dt.getSystemId(), sysId);
Assertion.assert(dt.getInternalSubset() == null);
Assertion.assert(dt.getOwnerDocument() == null);
// Creating a DocumentType with invalid or malformed qName should fail.
Assertion.assert(DOMExceptionsTest(impl, "createDocumentType",
new Class[]{String.class, String.class, String.class},
new Object[]{"<docName", pubId, sysId},
DOMException.INVALID_CHARACTER_ERR));
Assertion.assert(DOMExceptionsTest(impl, "createDocumentType",
new Class[]{String.class, String.class, String.class},
new Object[]{":docName", pubId, sysId},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(impl, "createDocumentType",
new Class[]{String.class, String.class, String.class},
new Object[]{"docName:", pubId, sysId},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(impl, "createDocumentType",
new Class[]{String.class, String.class, String.class},
new Object[]{"<doc::Name", pubId, sysId},
DOMException.INVALID_CHARACTER_ERR));
Assertion.assert(DOMExceptionsTest(impl, "createDocumentType",
new Class[]{String.class, String.class, String.class},
new Object[]{"<doc:N:ame", pubId, sysId},
DOMException.INVALID_CHARACTER_ERR));
}
// DOMImplementation.CreateDocument
{
DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
String qName = "foo:docName";
String pubId = "pubId";
String sysId = "http://sysId";
DocumentType dt = impl.createDocumentType(qName, pubId, sysId);
String docNSURI = "http://document.namespace";
Document doc = impl.createDocument(docNSURI, qName, dt);
Assertion.assert(dt.getOwnerDocument() == doc);
Assertion.assert(doc.getOwnerDocument() == null);
Assertion.assert(doc.getNodeType() == Node.DOCUMENT_NODE);
Assertion.assert(doc.getDoctype() == dt);
Assertion.equals(doc.getNodeName(), "#document");
Assertion.assert(doc.getNodeValue() == null);
Element el = doc.getDocumentElement();
Assertion.equals(el.getLocalName(), "docName");
Assertion.equals(el.getNamespaceURI(), docNSURI);
Assertion.equals(el.getNodeName(), qName);
Assertion.assert(el.getOwnerDocument() == doc);
Assertion.assert(el.getParentNode() == doc);
Assertion.equals(el.getPrefix(), "foo");
Assertion.equals(el.getTagName(), qName);
Assertion.assert(el.hasChildNodes() == false);
// Creating a second document with the same docType object should fail.
Assertion.assert(DOMExceptionsTest(impl, "createDocument",
new Class[]{String.class,
String.class,
DocumentType.class},
new Object[]{docNSURI, qName, dt},
DOMException.WRONG_DOCUMENT_ERR));
// Namespace tests of createDocument are covered by createElementNS below
}
// CreateElementNS methods
{
// Set up an initial (root element only) document.
DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
String qName = "foo:docName";
String pubId = "pubId";
String sysId = "http://sysId";
DocumentType dt = impl.createDocumentType(qName, pubId, sysId);
String docNSURI = "http://document.namespace";
Document doc = impl.createDocument(docNSURI, qName, dt);
Element rootEl = doc.getDocumentElement();
// CreateElementNS
Element ela = doc.createElementNS("http://nsa", "a:ela"); // prefix and URI
Element elb = doc.createElementNS("http://nsb", "elb"); // URI, no prefix.
Element elc = doc.createElementNS(null, "elc"); // No URI, no prefix.
rootEl.appendChild(ela);
rootEl.appendChild(elb);
rootEl.appendChild(elc);
Assertion.equals(ela.getNodeName(), "a:ela");
Assertion.equals(ela.getNamespaceURI(), "http://nsa");
Assertion.equals(ela.getPrefix(), "a");
Assertion.equals(ela.getLocalName(), "ela");
Assertion.equals(ela.getTagName(), "a:ela");
Assertion.equals(elb.getNodeName(), "elb");
Assertion.equals(elb.getNamespaceURI(), "http://nsb");
Assertion.assert(elb.getPrefix() == null);
Assertion.equals(elb.getLocalName(), "elb");
Assertion.equals(elb.getTagName(), "elb");
Assertion.equals(elc.getNodeName(), "elc");
Assertion.assert(elc.getNamespaceURI() == null);
Assertion.assert(elc.getPrefix() == null);
Assertion.equals(elc.getLocalName(), "elc");
Assertion.equals(elc.getTagName(), "elc");
// Badly formed qualified name
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "<a"},
DOMException.INVALID_CHARACTER_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", ":a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "a:"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "a::a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "a:a:a"},
DOMException.NAMESPACE_ERR));
String xmlURI = "http:
Assertion.equals(doc.createElementNS(xmlURI, "xml:a").getNamespaceURI(), xmlURI);
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "xml:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{"", "xml:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{null, "xml:a"},
DOMException.NAMESPACE_ERR));
//unlike Attribute, xmlns (no different from foo) can have any namespaceURI for Element
Assertion.equals(doc.createElementNS("http:
Assertion.equals(doc.createElementNS(xmlURI, "xmlns").getNamespaceURI(), xmlURI);
Assertion.equals(doc.createElementNS("", "xmlns").getNamespaceURI(), "");
Assertion.assert(doc.createElementNS(null, "xmlns").getNamespaceURI() == null);
//unlike Attribute, xmlns:a (no different from foo:a) can have any
// namespaceURI for Element except null
Assertion.equals(doc.createElementNS("http:
Assertion.equals(doc.createElementNS(xmlURI, "xmlns:a").getNamespaceURI(), xmlURI);
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{null, "xmlns:a"},
DOMException.NAMESPACE_ERR));
//In fact, any prefix != null should have a namespaceURI != null
Assertion.equals(doc.createElementNS("http:
Assertion.assert(DOMExceptionsTest(doc, "createElementNS",
new Class[]{String.class, String.class},
new Object[]{null, "foo:a"},
DOMException.NAMESPACE_ERR));
//Change prefix
Element elem = doc.createElementNS("http://nsa", "foo:a");
elem.setPrefix("bar");
Assertion.equals(elem.getNodeName(), "bar:a");
Assertion.equals(elem.getNamespaceURI(), "http://nsa");
Assertion.equals(elem.getPrefix(), "bar");
Assertion.equals(elem.getLocalName(), "a");
Assertion.equals(elem.getTagName(), "bar:a");
//The spec does not prevent us from setting prefix to a node without prefix
elem = doc.createElementNS("http://nsa", "a");
Assertion.equals(elem.getPrefix(), null);
elem.setPrefix("bar");
Assertion.equals(elem.getNodeName(), "bar:a");
Assertion.equals(elem.getNamespaceURI(), "http://nsa");
Assertion.equals(elem.getPrefix(), "bar");
Assertion.equals(elem.getLocalName(), "a");
Assertion.equals(elem.getTagName(), "bar:a");
//Special case for xml:a where namespaceURI must be xmlURI
elem = doc.createElementNS(xmlURI, "foo:a");
elem.setPrefix("xml");
elem = doc.createElementNS("http://nsa", "foo:a");
Assertion.assert(DOMExceptionsTest(elem, "setPrefix",
new Class[]{String.class},
new Object[]{"xml"},
DOMException.NAMESPACE_ERR));
//However, there is no restriction on prefix xmlns
elem.setPrefix("xmlns");
//Also an element can not have a prefix with namespaceURI == null
elem = doc.createElementNS(null, "a");
Assertion.assert(DOMExceptionsTest(elem, "setPrefix",
new Class[]{String.class},
new Object[]{"foo"},
DOMException.NAMESPACE_ERR));
//Only prefix of Element and Attribute can be changed
Assertion.assert(DOMExceptionsTest(doc, "setPrefix",
new Class[]{String.class},
new Object[]{"foo"},
DOMException.NAMESPACE_ERR));
//Prefix of readonly Element can not be changed.
//However, there is no way to create such Element for testing yet.
}
// CreateAttributeNS methods
{
// Set up an initial (root element only) document.
DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
String qName = "foo:docName";
String pubId = "pubId";
String sysId = "http://sysId";
DocumentType dt = impl.createDocumentType(qName, pubId, sysId);
String docNSURI = "http://document.namespace";
Document doc = impl.createDocument(docNSURI, qName, dt);
Element rootEl = doc.getDocumentElement();
// CreateAttributeNS
Attr attra = doc.createAttributeNS("http://nsa", "a:attra"); // prefix and URI
Attr attrb = doc.createAttributeNS("http://nsb", "attrb"); // URI, no prefix.
Attr attrc = doc.createAttributeNS(null, "attrc"); // No URI, no prefix.
Assertion.equals(attra.getNodeName(), "a:attra");
Assertion.equals(attra.getNamespaceURI(), "http://nsa");
Assertion.equals(attra.getPrefix(), "a");
Assertion.equals(attra.getLocalName(), "attra");
Assertion.equals(attra.getName(), "a:attra");
Assertion.assert(attra.getOwnerElement() == null);
Assertion.equals(attrb.getNodeName(), "attrb");
Assertion.equals(attrb.getNamespaceURI(), "http://nsb");
Assertion.equals(attrb.getPrefix(), null);
Assertion.equals(attrb.getLocalName(), "attrb");
Assertion.equals(attrb.getName(), "attrb");
Assertion.assert(attrb.getOwnerElement() == null);
Assertion.equals(attrc.getNodeName(), "attrc");
Assertion.assert(attrc.getNamespaceURI() == null);
Assertion.assert(attrc.getPrefix() == null);
Assertion.equals(attrc.getLocalName(), "attrc");
Assertion.equals(attrc.getName(), "attrc");
Assertion.assert(attrc.getOwnerElement() == null);
// Badly formed qualified name
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "<a"},
DOMException.INVALID_CHARACTER_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", ":a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "a:"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "a::a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "a:a:a"},
DOMException.NAMESPACE_ERR));
String xmlURI = "http:
Assertion.equals(doc.createAttributeNS(xmlURI, "xml:a").getNamespaceURI(), xmlURI);
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "xml:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"", "xml:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{null, "xml:a"},
DOMException.NAMESPACE_ERR));
String xmlnsURI = "http:
Assertion.equals(doc.createAttributeNS(xmlnsURI, "xmlns").getNamespaceURI(), xmlnsURI);
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "xmlns"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{xmlURI, "xmlns"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"", "xmlns"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{null, "xmlns"},
DOMException.NAMESPACE_ERR));
Assertion.equals(doc.createAttributeNS(xmlnsURI, "xmlns:a").getNamespaceURI(), xmlnsURI);
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"http://nsa", "xmlns:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{xmlURI, "xmlns:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"", "xmlns:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{null, "xmlns:a"},
DOMException.NAMESPACE_ERR));
//In fact, any prefix != null should have a namespaceURI != null
Assertion.equals(doc.createAttributeNS("http:
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{"", "foo:a"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(doc, "createAttributeNS",
new Class[]{String.class, String.class},
new Object[]{null, "foo:a"},
DOMException.NAMESPACE_ERR));
//Change prefix
Attr attr = doc.createAttributeNS("http://nsa", "foo:a");
attr.setPrefix("bar");
Assertion.equals(attr.getNodeName(), "bar:a");
Assertion.equals(attr.getNamespaceURI(), "http://nsa");
Assertion.equals(attr.getPrefix(), "bar");
Assertion.equals(attr.getLocalName(), "a");
Assertion.equals(attr.getName(), "bar:a");
//The spec does not prevent us from setting prefix to a node without prefix
attr = doc.createAttributeNS("http://nsa", "a");
Assertion.assert(attr.getPrefix() == null);
attr.setPrefix("bar");
Assertion.equals(attr.getNodeName(), "bar:a");
Assertion.equals(attr.getNamespaceURI(), "http://nsa");
Assertion.equals(attr.getPrefix(), "bar");
Assertion.equals(attr.getLocalName(), "a");
Assertion.equals(attr.getName(), "bar:a");
//Special case for xml:a where namespaceURI must be xmlURI
attr = doc.createAttributeNS(xmlURI, "foo:a");
attr.setPrefix("xml");
attr = doc.createAttributeNS("http://nsa", "foo:a");
Assertion.assert(DOMExceptionsTest(attr, "setPrefix",
new Class[]{String.class},
new Object[]{"xml"},
DOMException.NAMESPACE_ERR));
//Special case for xmlns:a where namespaceURI must be xmlURI
attr = doc.createAttributeNS(xmlnsURI, "foo:a");
attr.setPrefix("xmlns");
attr = doc.createAttributeNS("http://nsa", "foo:a");
Assertion.assert(DOMExceptionsTest(attr, "setPrefix",
new Class[]{String.class},
new Object[]{"xmlns"},
DOMException.NAMESPACE_ERR));
//Special case for xmlns where no prefix can be set
attr = doc.createAttributeNS(xmlnsURI, "xmlns");
Assertion.assert(DOMExceptionsTest(attr, "setPrefix",
new Class[]{String.class},
new Object[]{"xml"},
DOMException.NAMESPACE_ERR));
Assertion.assert(DOMExceptionsTest(attr, "setPrefix",
new Class[]{String.class},
new Object[]{"foo"},
DOMException.NAMESPACE_ERR));
//Also an attribute can not have a prefix with namespaceURI == null
attr = doc.createAttributeNS(null, "a");
Assertion.assert(DOMExceptionsTest(attr, "setPrefix",
new Class[]{String.class},
new Object[]{"foo"},
DOMException.NAMESPACE_ERR));
//Only prefix of Element and Attribute can be changed
Assertion.assert(DOMExceptionsTest(attr, "setPrefix",
new Class[]{String.class},
new Object[]{"foo"},
DOMException.NAMESPACE_ERR));
//Prefix of readonly Attribute can not be changed.
//However, there is no way to create such DOM_Attribute for testing yet.
}
// getElementsByTagName* — DOM Level 1 (qualified-name) and Level 2
// (namespace-aware) element lookup, wildcard handling, and list liveness.
{
    // Set up an initial (root element only) document.
    DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
    String qName = "foo:docName";
    String pubId = "pubId";
    String sysId = "http://sysId";
    DocumentType dt = impl.createDocumentType(qName, pubId, sysId);
    String docNSURI = "http://document.namespace";
    Document doc = impl.createDocument(docNSURI, qName, dt);
    Element rootEl = doc.getDocumentElement();
    // Populate the document with five children of the root: two "ela" elements
    // in "http://nsa" under different prefixes, two "elb" elements in different
    // namespaces, and one "elc" element without a namespace.
    Element ela = doc.createElementNS("http://nsa", "a:ela");
    rootEl.appendChild(ela);
    Element elb = doc.createElementNS("http://nsb", "elb");
    rootEl.appendChild(elb);
    Element elc = doc.createElementNS(null, "elc");
    rootEl.appendChild(elc);
    Element eld = doc.createElementNS("http://nsa", "d:ela");
    rootEl.appendChild(eld);
    Element ele = doc.createElementNS("http://nse", "elb");
    rootEl.appendChild(ele);
    // Access with DOM Level 1 getElementsByTagName:
    // matching is on the full qualified name, namespaces are ignored.
    NodeList nl = doc.getElementsByTagName("a:ela");
    Assertion.assert(nl.getLength() == 1);
    Assertion.assert(nl.item(0) == ela);
    nl = doc.getElementsByTagName("elb");
    Assertion.assert(nl.getLength() == 2);
    Assertion.assert(nl.item(0) == elb);
    Assertion.assert(nl.item(1) == ele);
    nl = doc.getElementsByTagName("d:ela");
    Assertion.assert(nl.getLength() == 1);
    Assertion.assert(nl.item(0) == eld);
    // Access with DOM Level 2 getElementsByTagNameNS:
    // matching is on the (namespaceURI, localName) pair; "*" is a wildcard.
    nl = doc.getElementsByTagNameNS(null, "elc");
    Assertion.assert(nl.getLength() == 1);
    Assertion.assert(nl.item(0) == elc);
    nl = doc.getElementsByTagNameNS("http://nsa", "ela");
    Assertion.assert(nl.getLength() == 2);
    Assertion.assert(nl.item(0) == ela);
    Assertion.assert(nl.item(1) == eld);
    nl = doc.getElementsByTagNameNS(null, "elb");
    Assertion.assert(nl.getLength() == 0);
    nl = doc.getElementsByTagNameNS("http://nsb", "elb");
    Assertion.assert(nl.getLength() == 1);
    Assertion.assert(nl.item(0) == elb);
    nl = doc.getElementsByTagNameNS("*", "elb");
    Assertion.assert(nl.getLength() == 2);
    Assertion.assert(nl.item(0) == elb);
    Assertion.assert(nl.item(1) == ele);
    nl = doc.getElementsByTagNameNS("http://nsa", "*");
    Assertion.assert(nl.getLength() == 2);
    Assertion.assert(nl.item(0) == ela);
    Assertion.assert(nl.item(1) == eld);
    nl = doc.getElementsByTagNameNS("*", "*");
    Assertion.assert(nl.getLength() == 6); // Gets the document root element, plus 5 more
    // item() past the end must return null rather than throw.
    Assertion.assert(nl.item(6) == null);
    // Assertion.assert(nl.item(-1) == 0);
    nl = rootEl.getElementsByTagNameNS("*", "*");
    Assertion.assert(nl.getLength() == 5);
    // The prefix plays no role in namespace-aware lookup: "d:ela" is not a
    // local name, so nothing matches.
    nl = doc.getElementsByTagNameNS("http://nsa", "d:ela");
    Assertion.assert(nl.getLength() == 0);
    // Node lists are live: structural changes to the tree must be reflected
    // in NodeLists that were obtained earlier.
    nl = doc.getElementsByTagNameNS("*", "*");
    NodeList nla = ela.getElementsByTagNameNS("*", "*");
    Assertion.assert(nl.getLength() == 6);
    Assertion.assert(nla.getLength() == 0);
    rootEl.removeChild(elc);
    Assertion.assert(nl.getLength() == 5);
    Assertion.assert(nla.getLength() == 0);
    ela.appendChild(elc);
    Assertion.assert(nl.getLength() == 6);
    Assertion.assert(nla.getLength() == 1);
}
// Attributes and NamedNodeMaps — namespace-aware attribute creation,
// replacement semantics of setAttributeNodeNS, and getNamedItemNS lookup.
{
    // Set up an initial (root element only) document.
    DOMImplementation impl = DOMImplementationImpl.getDOMImplementation();
    String qName = "foo:docName";
    String pubId = "pubId";
    String sysId = "http://sysId";
    DocumentType dt = impl.createDocumentType(qName, pubId, sysId);
    String docNSURI = "http://document.namespace";
    Document doc = impl.createDocument(docNSURI, qName, dt);
    Element rootEl = doc.getDocumentElement();
    // Create a set of attributes and hang them on the root element.
    // NOTE: attrd has the same (namespaceURI, localName) pair as attra, so
    // setAttributeNodeNS replaces attra in the element's attribute map; that
    // is why the map length is 4 and attra ends up with no owner element.
    Attr attra = doc.createAttributeNS("http://nsa", "a:attra");
    rootEl.setAttributeNodeNS(attra);
    Attr attrb = doc.createAttributeNS("http://nsb", "attrb");
    rootEl.setAttributeNodeNS(attrb);
    Attr attrc = doc.createAttributeNS(null, "attrc");
    rootEl.setAttributeNodeNS(attrc);
    Attr attrd = doc.createAttributeNS("http://nsa", "d:attra");
    rootEl.setAttributeNodeNS(attrd);
    Attr attre = doc.createAttributeNS("http://nse", "attrb");
    rootEl.setAttributeNodeNS(attre);
    // Check that the attribute nodes were created with the correct properties:
    // qualified node name, namespace URI, local name, prefix, empty initial
    // value, and "specified" set.
    Assertion.equals(attra.getNodeName(), "a:attra");
    Assertion.equals(attra.getNamespaceURI(), "http://nsa");
    Assertion.equals(attra.getLocalName(), "attra");
    Assertion.equals(attra.getName(), "a:attra");
    Assertion.assert(attra.getNodeType() == Node.ATTRIBUTE_NODE);
    Assertion.equals(attra.getNodeValue(), "");
    Assertion.equals(attra.getPrefix(), "a");
    Assertion.assert(attra.getSpecified() == true);
    Assertion.equals(attra.getValue(), "");
    // attra was displaced by attrd above, so it no longer has an owner.
    Assertion.assert(attra.getOwnerElement() == null);
    // Test methods of NamedNodeMap: lookup is by (namespaceURI, localName);
    // prefixes are irrelevant, and misses return null.
    NamedNodeMap nnm = rootEl.getAttributes();
    Assertion.assert(nnm.getLength() == 4);
    Assertion.assert(nnm.getNamedItemNS("http://nsa", "attra") == attrd);
    Assertion.assert(nnm.getNamedItemNS("http://nsb", "attrb") == attrb);
    Assertion.assert(nnm.getNamedItemNS("http://nse", "attrb") == attre);
    Assertion.assert(nnm.getNamedItemNS(null, "attrc") == attrc);
    Assertion.assert(nnm.getNamedItemNS(null, "attra") == null);
    Assertion.assert(nnm.getNamedItemNS("http://nsa", "attrb") == null);
}
};
}
|
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.container.XNameAccess;
import com.sun.star.lang.XComponent;
import com.sun.star.sdbc.DataType;
import com.sun.star.sdbcx.XColumnsSupplier;
import com.sun.star.uno.Any;
import com.sun.star.uno.AnyConverter;
import com.sun.star.uno.Exception;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import com.sun.star.wizards.common.Helper;
import com.sun.star.sdb.XCompletedExecution;
import com.sun.star.lang.Locale;
import com.sun.star.wizards.common.InvalidQueryException;
import com.sun.star.wizards.common.JavaTools;
import com.sun.star.wizards.common.NumberFormatter;
import com.sun.star.sdbc.XResultSet;
import com.sun.star.task.XInteractionHandler;
/**
 * Executes a database command through a <code>com.sun.star.sdb.RowSet</code>
 * and gives typed access to the values of the resulting records: textual
 * columns are read as Strings, numeric and temporal columns as Doubles (dates
 * and timestamps corrected by the database's null-date offset).
 */
public class RecordParser extends QueryMetaData {

    XNameAccess xColumns;
    com.sun.star.sdbc.XRow xRow;
    com.sun.star.sdbc.XRow xResultSetRow;
    public XResultSet ResultSet;
    XInterface xRowSet;
    XCompletedExecution xExecute;
    XColumnsSupplier xRowSetColumnsSupplier;
    XComponent xRowSetComponent;
    XInteractionHandler xInteraction;
    public FieldColumn[] GroupFieldColumns;
    public FieldColumn[] RecordFieldColumns;

    /** Creates a new instance of RecordParser with an explicit locale and number formatter. */
    public RecordParser(XMultiServiceFactory _xMSF, Locale _aLocale, NumberFormatter _oNumberFormatter) {
        super(_xMSF, _aLocale, _oNumberFormatter);
        getInterfaces();
    }

    /** Creates a new instance of RecordParser */
    public RecordParser(XMultiServiceFactory _xMSF) {
        super(_xMSF);
        getInterfaces();
    }

    /**
     * Instantiates the row set and the interaction handler services and caches
     * the UNO interfaces needed later (execution, column access, disposal).
     * Failures are only logged; the fields then stay null.
     */
    private void getInterfaces() {
        try {
            xRowSet = (XInterface) xMSF.createInstance("com.sun.star.sdb.RowSet");
            xRowSetColumnsSupplier = (XColumnsSupplier) UnoRuntime.queryInterface(XColumnsSupplier.class, xRowSet);
            xRowSetComponent = (XComponent) UnoRuntime.queryInterface(XComponent.class, xRowSet);
            xExecute = (com.sun.star.sdb.XCompletedExecution) UnoRuntime.queryInterface(com.sun.star.sdb.XCompletedExecution.class, xRowSet);
            XInterface oInteraction = (XInterface) xMSF.createInstance("com.sun.star.sdb.InteractionHandler");
            xInteraction = (XInteractionHandler) UnoRuntime.queryInterface(XInteractionHandler.class, oInteraction);
        } catch (Exception exception) {
            exception.printStackTrace(System.out);
        }
    }

    /**
     * Reads the current row's value at the given 1-based column index as a
     * String wrapped in a UNO Any; returns null if the value cannot be read.
     */
    private Object getColumnStringValue(int ColIndex) {
        try {
            String sValue = xResultSetRow.getString(ColIndex);
            com.sun.star.uno.Type CurType = new com.sun.star.uno.Type(String.class);
            return AnyConverter.toObject(CurType, sValue);
        } catch (Exception exception) {
            exception.printStackTrace(System.out);
            return null;
        }
    }

    /**
     * Reads the current row's value at the given 1-based column index as a
     * Double. For date-based columns the database's null-date offset is added
     * so the value matches the office's date epoch. Returns Any.VOID for SQL
     * NULL or on error.
     */
    private Object getColumnDoubleValue(int ColIndex, boolean bisDate) {
        try {
            Double DblValue;
            if (bisDate)
                DblValue = new Double(xResultSetRow.getDouble(ColIndex) + (double) super.getNullDateCorrection());
            else
                DblValue = new Double(xResultSetRow.getDouble(ColIndex));
            if (!xResultSetRow.wasNull())
                return DblValue;
        } catch (Exception exception) {
            exception.printStackTrace(System.out);
        }
        return Any.VOID;
    }

    /**
     * Converts the current row's value at ColIndex according to the SDBC data
     * type: textual types become Strings, all numeric (and time-of-day) types
     * become Doubles, and date-based types become null-date-corrected Doubles.
     * Unknown types yield Any.VOID.
     */
    public Object getColumnValue(int ColIndex, int iType) {
        Object oAny = Any.VOID;
        switch (iType) {
            // Numeric and time-of-day types: plain double value.
            case DataType.BIT :
            case DataType.BOOLEAN :
            case DataType.TINYINT :
            case DataType.BIGINT :
            case DataType.NUMERIC :
            case DataType.INTEGER :
            case DataType.SMALLINT :
            case DataType.DECIMAL : // == 3; [with decimal places]
            case DataType.FLOAT :
            case DataType.REAL :
            case DataType.DOUBLE :
            case DataType.TIME :
                oAny = getColumnDoubleValue(ColIndex, false);
                break;
            // Textual types.
            case DataType.CHAR :
            case DataType.VARCHAR :
            case DataType.LONGVARCHAR :
                oAny = getColumnStringValue(ColIndex);
                break;
            // Date-based types need the null-date correction.
            case DataType.DATE :
            case DataType.TIMESTAMP :
                oAny = getColumnDoubleValue(ColIndex, true);
                break;
        }
        return oAny;
    }

    /**
     * Executes the configured command on the row set and initializes the result
     * set, its columns and the grouping/record field column lists.
     *
     * @param sMessage currently unused; kept for interface compatibility
     * @param binitializeDBColumns whether to (re)initialize the field names from the result set's columns
     * @return true on success, false if the executed query was invalid
     * @throws InvalidQueryException if the command fails for any other reason
     */
    public boolean executeCommand(String sMessage, boolean binitializeDBColumns) throws InvalidQueryException {
        try {
            Helper.setUnoPropertyValue(xRowSet, "DataSourceName", DataSourceName);
            Helper.setUnoPropertyValue(xRowSet, "ActiveConnection", DBConnection);
            Helper.setUnoPropertyValue(xRowSet, "Command", Command);
            Helper.setUnoPropertyValue(xRowSet, "CommandType", new Integer(com.sun.star.sdb.CommandType.COMMAND));
            xExecute.executeWithCompletion(xInteraction);
            // create the ResultSet to access the data
            com.sun.star.sdb.XResultSetAccess xResultAccess = (com.sun.star.sdb.XResultSetAccess) UnoRuntime.queryInterface(com.sun.star.sdb.XResultSetAccess.class, xRowSet);
            ResultSet = xResultAccess.createResultSet();
            xResultSetRow = (com.sun.star.sdbc.XRow) UnoRuntime.queryInterface(com.sun.star.sdbc.XRow.class, ResultSet);
            XColumnsSupplier xDBCols = (XColumnsSupplier) UnoRuntime.queryInterface(XColumnsSupplier.class, ResultSet);
            xColumns = xDBCols.getColumns();
            setCommandType(com.sun.star.sdb.CommandType.COMMAND);
            if (binitializeDBColumns)
                setFieldNames(FieldNames, xColumns);
            this.setColindices();
            GroupFieldColumns = getFieldColumnList(GroupFieldNames);
            RecordFieldColumns = getFieldColumnList(RecordFieldNames);
            return true;
        } catch (InvalidQueryException queryexception) {
            queryexception.printStackTrace(System.out);
            return false;
        } catch (Exception exception) {
            exception.printStackTrace(System.out);
            throw new InvalidQueryException(xMSF, Command);
        }
    }

    /** Maps each display name to its FieldColumn meta-data object. */
    private FieldColumn[] getFieldColumnList(String[] _FieldNames) {
        FieldColumn[] LocFieldColumns = new FieldColumn[_FieldNames.length];
        for (int i = 0; i < _FieldNames.length; i++) {
            LocFieldColumns[i] = super.getFieldColumnByDisplayName(_FieldNames[i]);
        }
        return LocFieldColumns;
    }

    /** Returns the current row's value of the ColIndex-th grouping column. */
    public Object getGroupColumnValue(int ColIndex) {
        FieldColumn CurDBFieldColumn = this.GroupFieldColumns[ColIndex];
        return getColumnValue(CurDBFieldColumn.ColIndex, CurDBFieldColumn.FieldType);
    }

    /**
     * Appends the current row's record field values as an Object[] to the given
     * vector. Always returns true.
     */
    public boolean getcurrentRecordData(java.util.Vector DataVector) {
        Object[] RecordValueArray = new Object[RecordFieldNames.length];
        for (int i = 0; i < RecordFieldNames.length; i++) {
            FieldColumn CurDBFieldColumn = this.RecordFieldColumns[i];
            RecordValueArray[i] = getColumnValue(CurDBFieldColumn.ColIndex, CurDBFieldColumn.FieldType);
        }
        DataVector.addElement(RecordValueArray);
        return true;
    }

    /**
     * Resolves, for every field name, the 1-based column index within the
     * result set's column collection.
     *
     * @throws InvalidQueryException if a field is not among the query's columns
     */
    private void setColindices() throws InvalidQueryException {
        String[] AllQueryFieldNames = xColumns.getElementNames();
        for (int i = 0; i < FieldNames.length; i++) {
            FieldColumn CurFieldColumn = getFieldColumnByDisplayName(FieldNames[i]);
            // BUGFIX: FieldInList() signals "not found" with -1. The previous
            // code added 1 first and then compared the result against -1, so
            // the check could never fire; test the raw index before converting
            // it to the 1-based column index.
            int iFieldIndex = JavaTools.FieldInList(AllQueryFieldNames, CurFieldColumn.FieldName);
            if (iFieldIndex == -1)
                throw new InvalidQueryException(xMSF, Command);
            CurFieldColumn.ColIndex = iFieldIndex + 1;
        }
    }

    /** Disposes the underlying row set component and the base class resources. */
    public void dispose() {
        if (xRowSetComponent != null)
            xRowSetComponent.dispose();
        super.dispose();
    }
}
|
package org.elasticsearch.xpack.security.authc;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequestBuilder;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.security.AuthenticateResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.action.ApiKey;
import org.elasticsearch.xpack.core.security.action.CreateApiKeyRequestBuilder;
import org.elasticsearch.xpack.core.security.action.CreateApiKeyResponse;
import org.elasticsearch.xpack.core.security.action.GetApiKeyAction;
import org.elasticsearch.xpack.core.security.action.GetApiKeyRequest;
import org.elasticsearch.xpack.core.security.action.GetApiKeyResponse;
import org.elasticsearch.xpack.core.security.action.InvalidateApiKeyAction;
import org.elasticsearch.xpack.core.security.action.InvalidateApiKeyRequest;
import org.elasticsearch.xpack.core.security.action.InvalidateApiKeyResponse;
import org.elasticsearch.xpack.core.security.action.user.PutUserAction;
import org.elasticsearch.xpack.core.security.action.user.PutUserRequest;
import org.elasticsearch.xpack.core.security.action.user.PutUserResponse;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.core.security.authz.RoleDescriptor;
import org.elasticsearch.xpack.security.transport.filter.IPFilter;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames.SECURITY_MAIN_ALIAS;
import static org.elasticsearch.xpack.security.Security.SECURITY_CRYPTO_THREAD_POOL_NAME;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.in;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
public class ApiKeyIntegTests extends SecurityIntegTestCase {
private static final long DELETE_INTERVAL_MILLIS = 100L;
private static final int CRYPTO_THREAD_POOL_QUEUE_SIZE = 10;
@Override
public Settings nodeSettings(int nodeOrdinal) {
    // Enable the API key service and make the expired-key remover run quickly
    // so the remover tests do not have to wait for the production intervals.
    final Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal));
    settings.put(XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.getKey(), true);
    settings.put(ApiKeyService.DELETE_INTERVAL.getKey(), TimeValue.timeValueMillis(DELETE_INTERVAL_MILLIS));
    settings.put(ApiKeyService.DELETE_TIMEOUT.getKey(), TimeValue.timeValueSeconds(5L));
    settings.put("xpack.security.crypto.thread_pool.queue_size", CRYPTO_THREAD_POOL_QUEUE_SIZE);
    return settings.build();
}
/** These tests authenticate over REST with "ApiKey ..." headers, so the real HTTP transport is required. */
@Override
protected boolean addMockHttpTransport() {
    return false; // need real http
}
/** API keys are stored as documents in the security index; wait until it accepts writes before each test. */
@Before
public void waitForSecurityIndexWritable() throws Exception {
    assertSecurityIndexActive();
}
/** Removes test state so one test's API keys cannot leak into the next. */
@After
public void wipeSecurityIndex() throws Exception {
    // get the api key service and wait until api key expiration is not in progress!
    // (deleting the index while the remover is mid-run would race with it)
    awaitApiKeysRemoverCompletion();
    deleteSecurityIndex();
}
@Override
public String configRoles() {
    // Roles for the API key tests, appended to the default test roles: one
    // with no API key privilege, one per manage privilege, and a run_as role.
    final StringBuilder roles = new StringBuilder(super.configRoles());
    roles.append("\n");
    roles.append("no_api_key_role:\n");
    roles.append(" cluster: [\"manage_token\"]\n");
    roles.append("manage_api_key_role:\n");
    roles.append(" cluster: [\"manage_api_key\"]\n");
    roles.append("manage_own_api_key_role:\n");
    roles.append(" cluster: [\"manage_own_api_key\"]\n");
    roles.append("run_as_role:\n");
    roles.append(" run_as: [\"user_with_manage_own_api_key_role\"]\n");
    return roles.toString();
}
@Override
public String configUsers() {
    // All three dedicated test users share the same pre-hashed test password.
    final String usersPasswdHashed = new String(
        getFastStoredHashAlgoForTests().hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
    final StringBuilder users = new StringBuilder(super.configUsers());
    for (String userName : new String[] {
            "user_with_no_api_key_role", "user_with_manage_api_key_role", "user_with_manage_own_api_key_role" }) {
        users.append(userName).append(":").append(usersPasswdHashed).append("\n");
    }
    return users.toString();
}
@Override
public String configUsersRoles() {
    // Map each dedicated test user onto the role of the matching flavour.
    final StringBuilder mappings = new StringBuilder(super.configUsersRoles());
    mappings.append("no_api_key_role:user_with_no_api_key_role\n");
    mappings.append("manage_api_key_role:user_with_manage_api_key_role\n");
    mappings.append("manage_own_api_key_role:user_with_manage_own_api_key_role\n");
    return mappings.toString();
}
/** Blocks until no node's ApiKeyService is still running its expired-key removal. */
private void awaitApiKeysRemoverCompletion() throws Exception {
    for (ApiKeyService apiKeyService : internalCluster().getInstances(ApiKeyService.class)) {
        assertBusy(() -> assertFalse(apiKeyService.isExpirationInProgress()));
    }
}
/**
 * End-to-end check of API key creation: a key with an explicit 7-day
 * expiration, a minimal "simple" key without expiration, REST authentication
 * with the key, and enforcement of the key's role descriptors (monitor only).
 */
public void testCreateApiKey() throws Exception {
    // Get an instant without nanoseconds as the expiration has millisecond precision
    final Instant start = Instant.ofEpochMilli(Instant.now().toEpochMilli());
    // Restrict the key to the cluster "monitor" privilege only.
    final RoleDescriptor descriptor = new RoleDescriptor("role", new String[] { "monitor" }, null, null);
    Client client = client().filterWithHeader(Collections.singletonMap("Authorization",
        UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
            SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    final CreateApiKeyResponse response = new CreateApiKeyRequestBuilder(client)
        .setName("test key")
        .setExpiration(TimeValue.timeValueHours(TimeUnit.DAYS.toHours(7L)))
        .setRoleDescriptors(Collections.singletonList(descriptor))
        .get();
    assertEquals("test key", response.getName());
    assertNotNull(response.getId());
    assertNotNull(response.getKey());
    Instant expiration = response.getExpiration();
    // Expiration has millisecond precision
    final long daysBetween = ChronoUnit.DAYS.between(start, expiration);
    assertThat(daysBetween, is(7L));
    // create simple api key (no expiration, no role descriptors)
    final CreateApiKeyResponse simple = new CreateApiKeyRequestBuilder(client).setName("simple").get();
    assertEquals("simple", simple.getName());
    assertNotNull(simple.getId());
    assertNotNull(simple.getKey());
    // The public id must not leak the secret key material.
    assertThat(simple.getId(), not(containsString(new String(simple.getKey().getChars()))));
    assertNull(simple.getExpiration());
    // use the first ApiKey for authorized action; header value is base64("id:key")
    final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString(
        (response.getId() + ":" + response.getKey().toString()).getBytes(StandardCharsets.UTF_8));
    // Assert that we can authenticate with the API KEY
    final RestHighLevelClient restClient = new TestRestHighLevelClient();
    AuthenticateResponse authResponse = restClient.security().authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization",
        "ApiKey " + base64ApiKeyKeyValue).build());
    assertThat(authResponse.getUser().getUsername(), equalTo(SecuritySettingsSource.TEST_SUPERUSER));
    // use the first ApiKey for an unauthorized action
    // (a cluster settings update needs more than the "monitor" privilege)
    ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, () ->
        client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue))
            .admin()
            .cluster()
            .prepareUpdateSettings().setTransientSettings(Settings.builder().put(IPFilter.IP_FILTER_ENABLED_SETTING.getKey(), true))
            .get());
    assertThat(e.getMessage(), containsString("unauthorized"));
    assertThat(e.status(), is(RestStatus.FORBIDDEN));
}
/** API key names are not unique: several distinct keys may share one name. */
public void testMultipleApiKeysCanHaveSameName() {
    final String sharedName = randomAlphaOfLength(5);
    final int keyCount = randomIntBetween(2, 5);
    final List<CreateApiKeyResponse> created = new ArrayList<>();
    for (int i = 0; i < keyCount; i++) {
        final RoleDescriptor descriptor = new RoleDescriptor("role", new String[]{"monitor"}, null, null);
        final Client superuserClient = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
            .basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
        final CreateApiKeyResponse response = new CreateApiKeyRequestBuilder(superuserClient).setName(sharedName).setExpiration(null)
            .setRoleDescriptors(Collections.singletonList(descriptor)).get();
        assertNotNull(response.getId());
        assertNotNull(response.getKey());
        created.add(response);
    }
    assertThat(created.size(), is(keyCount));
    // Every key was created under the same (shared) name.
    for (CreateApiKeyResponse response : created) {
        assertThat(response.getName(), is(sharedName));
    }
}
/** Invalidating by realm name removes every key owned through the "file" realm. */
public void testInvalidateApiKeysForRealm() throws InterruptedException, ExecutionException {
    final int keyCount = randomIntBetween(3, 5);
    final List<CreateApiKeyResponse> created = createApiKeys(keyCount, null);
    final Client superuserClient = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
        .basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    final PlainActionFuture<InvalidateApiKeyResponse> future = new PlainActionFuture<>();
    superuserClient.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingRealmName("file"), future);
    verifyInvalidateResponse(keyCount, created, future.get());
}
/** All keys were created by the superuser, so invalidating by that user name removes them all. */
public void testInvalidateApiKeysForUser() throws Exception {
    final int keyCount = randomIntBetween(3, 5);
    final List<CreateApiKeyResponse> created = createApiKeys(keyCount, null);
    final Client superuserClient = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
        .basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    final PlainActionFuture<InvalidateApiKeyResponse> future = new PlainActionFuture<>();
    superuserClient.execute(InvalidateApiKeyAction.INSTANCE,
        InvalidateApiKeyRequest.usingUserName(SecuritySettingsSource.TEST_SUPERUSER), future);
    verifyInvalidateResponse(keyCount, created, future.get());
}
/** Invalidation can be scoped by realm and user name combined. */
public void testInvalidateApiKeysForRealmAndUser() throws InterruptedException, ExecutionException {
    final List<CreateApiKeyResponse> created = createApiKeys(1, null);
    final Client superuserClient = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
        .basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    final PlainActionFuture<InvalidateApiKeyResponse> future = new PlainActionFuture<>();
    superuserClient.execute(InvalidateApiKeyAction.INSTANCE,
        InvalidateApiKeyRequest.usingRealmAndUserName("file", SecuritySettingsSource.TEST_SUPERUSER), future);
    verifyInvalidateResponse(1, created, future.get());
}
/** A single key can be invalidated by its id. */
public void testInvalidateApiKeysForApiKeyId() throws InterruptedException, ExecutionException {
    final List<CreateApiKeyResponse> created = createApiKeys(1, null);
    final Client superuserClient = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
        .basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    final PlainActionFuture<InvalidateApiKeyResponse> future = new PlainActionFuture<>();
    superuserClient.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(created.get(0).getId(), false),
        future);
    verifyInvalidateResponse(1, created, future.get());
}
/** A key can also be invalidated by its (non-unique) name. */
public void testInvalidateApiKeysForApiKeyName() throws InterruptedException, ExecutionException {
    final List<CreateApiKeyResponse> created = createApiKeys(1, null);
    final Client superuserClient = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
        .basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    final PlainActionFuture<InvalidateApiKeyResponse> future = new PlainActionFuture<>();
    superuserClient.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyName(created.get(0).getName(), false),
        future);
    verifyInvalidateResponse(1, created, future.get());
}
/**
 * Asserts that exactly the given created keys were reported as newly
 * invalidated, with no previously-invalidated entries and no errors.
 */
private void verifyInvalidateResponse(int noOfApiKeys, List<CreateApiKeyResponse> responses,
                                      InvalidateApiKeyResponse invalidateResponse) {
    final String[] expectedIds = responses.stream().map(CreateApiKeyResponse::getId).toArray(String[]::new);
    assertThat(invalidateResponse.getInvalidatedApiKeys().size(), equalTo(noOfApiKeys));
    assertThat(invalidateResponse.getInvalidatedApiKeys(), containsInAnyOrder(expectedIds));
    assertThat(invalidateResponse.getPreviouslyInvalidatedApiKeys().size(), equalTo(0));
    assertThat(invalidateResponse.getErrors().size(), equalTo(0));
}
/**
 * Verifies that the ExpiredApiKeysRemover eventually deletes invalidated keys:
 * right after invalidation a key may still be visible (flagged invalidated),
 * but once the remover has run against it the key is gone.
 */
public void testInvalidatedApiKeysDeletedByRemover() throws Exception {
    Client client = waitForExpiredApiKeysRemoverTriggerReadyAndGetClient().filterWithHeader(
        Collections.singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
            SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    List<CreateApiKeyResponse> createdApiKeys = createApiKeys(2, null);
    // Invalidate the first key; this also triggers the remover.
    PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
    client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(createdApiKeys.get(0).getId(), false),
        listener);
    InvalidateApiKeyResponse invalidateResponse = listener.get();
    assertThat(invalidateResponse.getInvalidatedApiKeys().size(), equalTo(1));
    assertThat(invalidateResponse.getPreviouslyInvalidatedApiKeys().size(), equalTo(0));
    assertThat(invalidateResponse.getErrors().size(), equalTo(0));
    awaitApiKeysRemoverCompletion();
    refreshSecurityIndex();
    PlainActionFuture<GetApiKeyResponse> getApiKeyResponseListener = new PlainActionFuture<>();
    client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmName("file"), getApiKeyResponseListener);
    Set<String> expectedKeyIds = Sets.newHashSet(createdApiKeys.get(0).getId(), createdApiKeys.get(1).getId());
    // Whether the remover already deleted the freshly invalidated key is timing
    // dependent, so both outcomes are accepted below.
    boolean apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false;
    for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) {
        assertThat(apiKey.getId(), is(in(expectedKeyIds)));
        if (apiKey.getId().equals(createdApiKeys.get(0).getId())) {
            // has been invalidated but not yet deleted by ExpiredApiKeysRemover
            assertThat(apiKey.isInvalidated(), is(true));
            apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = true;
        } else if (apiKey.getId().equals(createdApiKeys.get(1).getId())) {
            // active api key
            assertThat(apiKey.isInvalidated(), is(false));
        }
    }
    assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length,
        is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 2 : 1));
    // Wait until the remover may trigger again, then invalidate the second key.
    client = waitForExpiredApiKeysRemoverTriggerReadyAndGetClient().filterWithHeader(
        Collections.singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
            SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    // invalidate API key to trigger remover
    listener = new PlainActionFuture<>();
    client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(createdApiKeys.get(1).getId(), false),
        listener);
    assertThat(listener.get().getInvalidatedApiKeys().size(), is(1));
    awaitApiKeysRemoverCompletion();
    refreshSecurityIndex();
    // Verify that 1st invalidated API key is deleted whereas the next one may be or may not be as it depends on whether update was
    // indexed before ExpiredApiKeysRemover ran
    getApiKeyResponseListener = new PlainActionFuture<>();
    client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmName("file"), getApiKeyResponseListener);
    expectedKeyIds = Sets.newHashSet(createdApiKeys.get(1).getId());
    apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false;
    for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) {
        assertThat(apiKey.getId(), is(in(expectedKeyIds)));
        if (apiKey.getId().equals(createdApiKeys.get(1).getId())) {
            // has been invalidated but not yet deleted by ExpiredApiKeysRemover
            assertThat(apiKey.isInvalidated(), is(true));
            apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = true;
        }
    }
    assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length,
        is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 1 : 0));
}
/**
 * Finds the node whose expired-key remover was triggered most recently, waits
 * until enough time has elapsed there for the next invalidation to trigger the
 * remover again, and returns a client bound to that node.
 */
private Client waitForExpiredApiKeysRemoverTriggerReadyAndGetClient() throws Exception {
    String latestNode = null;
    long latestTrigger = -1L;
    for (String nodeName : internalCluster().getNodeNames()) {
        final ApiKeyService apiKeyService = internalCluster().getInstance(ApiKeyService.class, nodeName);
        if (apiKeyService != null && apiKeyService.lastTimeWhenApiKeysRemoverWasTriggered() > latestTrigger) {
            latestNode = nodeName;
            latestTrigger = apiKeyService.lastTimeWhenApiKeysRemoverWasTriggered();
        }
    }
    final ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, latestNode);
    final long lastRunTime = latestTrigger;
    // Wait until more than DELETE_INTERVAL has passed since the last remover run.
    assertBusy(() -> assertThat(threadPool.relativeTimeInMillis() - lastRunTime, greaterThan(DELETE_INTERVAL_MILLIS)));
    return internalCluster().client(latestNode);
}
    /**
     * Verifies the retention behavior of the ExpiredApiKeysRemover: a key whose expiration is
     * back-dated one day (inside the 1-week retention window) must survive the remover run,
     * while a key back-dated eight days (outside the window) must be deleted. A third key is
     * invalidated purely to trigger the remover; a fourth key stays untouched as a control.
     * NOTE(review): relies on waitForExpiredApiKeysRemoverTriggerReadyAndGetClient() so the
     * remover can actually fire — confirm DELETE_INTERVAL_MILLIS gating if this flakes.
     */
    public void testExpiredApiKeysBehaviorWhenKeysExpired1WeekBeforeAnd1DayBefore() throws Exception {
        Client client = waitForExpiredApiKeysRemoverTriggerReadyAndGetClient().filterWithHeader(
            Collections.singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
                SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
        int noOfKeys = 4;
        List<CreateApiKeyResponse> createdApiKeys = createApiKeys(noOfKeys, null);
        Instant created = Instant.now();
        PlainActionFuture<GetApiKeyResponse> getApiKeyResponseListener = new PlainActionFuture<>();
        client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmName("file"), getApiKeyResponseListener);
        // sanity check: all four keys are initially visible
        assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length, is(noOfKeys));
        // Expire the 1st key such that it cannot be deleted by the remover
        // hack doc to modify the expiration time to a day before
        Instant dayBefore = created.minus(1L, ChronoUnit.DAYS);
        assertTrue(Instant.now().isAfter(dayBefore));
        UpdateResponse expirationDateUpdatedResponse = client
            .prepareUpdate(SECURITY_MAIN_ALIAS, createdApiKeys.get(0).getId())
            .setDoc("expiration_time", dayBefore.toEpochMilli())
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
            .get();
        assertThat(expirationDateUpdatedResponse.getResult(), is(DocWriteResponse.Result.UPDATED));
        // Expire the 2nd key such that it can be deleted by the remover
        // hack doc to modify the expiration time to the week before
        Instant weekBefore = created.minus(8L, ChronoUnit.DAYS);
        assertTrue(Instant.now().isAfter(weekBefore));
        expirationDateUpdatedResponse = client.prepareUpdate(SECURITY_MAIN_ALIAS, createdApiKeys.get(1).getId())
            .setDoc("expiration_time", weekBefore.toEpochMilli())
            .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
            .get();
        assertThat(expirationDateUpdatedResponse.getResult(), is(DocWriteResponse.Result.UPDATED));
        // Invalidate to trigger the remover
        PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
        client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(createdApiKeys.get(2).getId(), false),
            listener);
        assertThat(listener.get().getInvalidatedApiKeys().size(), is(1));
        awaitApiKeysRemoverCompletion();
        refreshSecurityIndex();
        // Verify get API keys does not return api keys deleted by ExpiredApiKeysRemover
        getApiKeyResponseListener = new PlainActionFuture<>();
        client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmName("file"), getApiKeyResponseListener);
        // key 1 (week-old expiration) must be gone; keys 0, 2 and 3 may remain
        Set<String> expectedKeyIds = Sets.newHashSet(createdApiKeys.get(0).getId(), createdApiKeys.get(2).getId(),
            createdApiKeys.get(3).getId());
        // the invalidated key (index 2) may or may not have been deleted yet — track which
        boolean apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = false;
        for (ApiKey apiKey : getApiKeyResponseListener.get().getApiKeyInfos()) {
            assertThat(apiKey.getId(), is(in(expectedKeyIds)));
            if (apiKey.getId().equals(createdApiKeys.get(0).getId())) {
                // has been expired, not invalidated
                assertTrue(apiKey.getExpiration().isBefore(Instant.now()));
                assertThat(apiKey.isInvalidated(), is(false));
            } else if (apiKey.getId().equals(createdApiKeys.get(2).getId())) {
                // has not been expired as no expiration, is invalidated but not yet deleted by ExpiredApiKeysRemover
                assertThat(apiKey.getExpiration(), is(nullValue()));
                assertThat(apiKey.isInvalidated(), is(true));
                apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover = true;
            } else if (apiKey.getId().equals(createdApiKeys.get(3).getId())) {
                // has not been expired as no expiration, not invalidated
                assertThat(apiKey.getExpiration(), is(nullValue()));
                assertThat(apiKey.isInvalidated(), is(false));
            } else {
                fail("unexpected API key " + apiKey);
            }
        }
        // 3 keys remain if the invalidated one is still around, otherwise 2
        assertThat(getApiKeyResponseListener.get().getApiKeyInfos().length,
            is((apiKeyInvalidatedButNotYetDeletedByExpiredApiKeysRemover) ? 3 : 2));
    }
private void refreshSecurityIndex() throws Exception {
assertBusy(() -> {
final RefreshResponse refreshResponse = client().admin().indices().prepareRefresh(SECURITY_MAIN_ALIAS).get();
assertThat(refreshResponse.getFailedShards(), is(0));
});
}
public void testActiveApiKeysWithNoExpirationNeverGetDeletedByRemover() throws Exception {
List<CreateApiKeyResponse> responses = createApiKeys(2, null);
Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
// trigger expired keys remover
client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(responses.get(1).getId(), false), listener);
InvalidateApiKeyResponse invalidateResponse = listener.get();
assertThat(invalidateResponse.getInvalidatedApiKeys().size(), equalTo(1));
assertThat(invalidateResponse.getPreviouslyInvalidatedApiKeys().size(), equalTo(0));
assertThat(invalidateResponse.getErrors().size(), equalTo(0));
PlainActionFuture<GetApiKeyResponse> getApiKeyResponseListener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmName("file"), getApiKeyResponseListener);
GetApiKeyResponse response = getApiKeyResponseListener.get();
verifyGetResponse(2, responses, response, Collections.singleton(responses.get(0).getId()),
Collections.singletonList(responses.get(1).getId()));
}
public void testGetApiKeysForRealm() throws InterruptedException, ExecutionException {
int noOfApiKeys = randomIntBetween(3, 5);
List<CreateApiKeyResponse> responses = createApiKeys(noOfApiKeys, null);
Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
boolean invalidate= randomBoolean();
List<String> invalidatedApiKeyIds = null;
Set<String> expectedValidKeyIds = null;
if (invalidate) {
PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(responses.get(0).getId(), false),
listener);
InvalidateApiKeyResponse invalidateResponse = listener.get();
invalidatedApiKeyIds = invalidateResponse.getInvalidatedApiKeys();
expectedValidKeyIds = responses.stream().filter(o -> !o.getId().equals(responses.get(0).getId())).map(o -> o.getId())
.collect(Collectors.toSet());
} else {
invalidatedApiKeyIds = Collections.emptyList();
expectedValidKeyIds = responses.stream().map(o -> o.getId()).collect(Collectors.toSet());
}
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmName("file"), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse(noOfApiKeys, responses, response,
expectedValidKeyIds,
invalidatedApiKeyIds);
}
public void testGetApiKeysForUser() throws Exception {
int noOfApiKeys = randomIntBetween(3, 5);
List<CreateApiKeyResponse> responses = createApiKeys(noOfApiKeys, null);
Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingUserName(SecuritySettingsSource.TEST_SUPERUSER), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse(noOfApiKeys, responses, response, responses.stream().map(o -> o.getId()).collect(Collectors.toSet()), null);
}
public void testGetApiKeysForRealmAndUser() throws InterruptedException, ExecutionException {
List<CreateApiKeyResponse> responses = createApiKeys(1, null);
Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingRealmAndUserName("file", SecuritySettingsSource.TEST_SUPERUSER),
listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse(1, responses, response, Collections.singleton(responses.get(0).getId()), null);
}
public void testGetApiKeysForApiKeyId() throws InterruptedException, ExecutionException {
List<CreateApiKeyResponse> responses = createApiKeys(1, null);
Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingApiKeyId(responses.get(0).getId(), false), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse(1, responses, response, Collections.singleton(responses.get(0).getId()), null);
}
public void testGetApiKeysForApiKeyName() throws InterruptedException, ExecutionException {
List<CreateApiKeyResponse> responses = createApiKeys(1, null);
Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingApiKeyName(responses.get(0).getName(), false), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse(1, responses, response, Collections.singleton(responses.get(0).getId()), null);
}
public void testGetApiKeysOwnedByCurrentAuthenticatedUser() throws InterruptedException, ExecutionException {
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
List<CreateApiKeyResponse> defaultUserCreatedKeys = createApiKeys(noOfSuperuserApiKeys, null);
String userWithManageApiKeyRole = randomFrom("user_with_manage_api_key_role", "user_with_manage_own_api_key_role");
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys(userWithManageApiKeyRole,
noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
final Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(userWithManageApiKeyRole, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.forOwnedApiKeys(), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse(userWithManageApiKeyRole, noOfApiKeysForUserWithManageApiKeyRole, userWithManageApiKeyRoleApiKeys,
response, userWithManageApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null);
}
public void testGetApiKeysOwnedByRunAsUserWhenOwnerIsTrue() throws ExecutionException, InterruptedException {
createUserWithRunAsRole();
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageOwnApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
"user_with_run_as_role", noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
getClientForRunAsUser().execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.forOwnedApiKeys(), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse("user_with_manage_own_api_key_role", noOfApiKeysForUserWithManageApiKeyRole, userWithManageOwnApiKeyRoleApiKeys,
response, userWithManageOwnApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null);
}
public void testGetApiKeysOwnedByRunAsUserWhenRunAsUserInfoIsGiven() throws ExecutionException, InterruptedException {
createUserWithRunAsRole();
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageOwnApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
"user_with_run_as_role", noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
getClientForRunAsUser().execute(GetApiKeyAction.INSTANCE,
GetApiKeyRequest.usingRealmAndUserName("file", "user_with_manage_own_api_key_role"), listener);
GetApiKeyResponse response = listener.get();
verifyGetResponse("user_with_manage_own_api_key_role", noOfApiKeysForUserWithManageApiKeyRole, userWithManageOwnApiKeyRoleApiKeys,
response, userWithManageOwnApiKeyRoleApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null);
}
public void testGetApiKeysOwnedByRunAsUserWillNotWorkWhenAuthUserInfoIsGiven() throws ExecutionException, InterruptedException {
createUserWithRunAsRole();
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
createApiKeys(noOfSuperuserApiKeys, null);
final List<CreateApiKeyResponse> userWithManageOwnApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
"user_with_run_as_role", noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
final Tuple<String,String> invalidRealmAndUserPair = randomFrom(
new Tuple<>("file", "user_with_run_as_role"),
new Tuple<>("index", "user_with_manage_own_api_key_role"),
new Tuple<>("index", "user_with_run_as_role"));
getClientForRunAsUser().execute(GetApiKeyAction.INSTANCE,
GetApiKeyRequest.usingRealmAndUserName(invalidRealmAndUserPair.v1(), invalidRealmAndUserPair.v2()), listener);
final ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, listener::actionGet);
assertThat(e.getMessage(), containsString(
"unauthorized for user [user_with_run_as_role] run as [user_with_manage_own_api_key_role]"));
}
public void testGetAllApiKeys() throws InterruptedException, ExecutionException {
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageOwnApiKeyRole = randomIntBetween(3,7);
List<CreateApiKeyResponse> defaultUserCreatedKeys = createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys("user_with_manage_api_key_role",
noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
List<CreateApiKeyResponse> userWithManageOwnApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
noOfApiKeysForUserWithManageOwnApiKeyRole, null, "monitor");
final Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue("user_with_manage_api_key_role", SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, new GetApiKeyRequest(), listener);
GetApiKeyResponse response = listener.get();
int totalApiKeys = noOfSuperuserApiKeys + noOfApiKeysForUserWithManageApiKeyRole + noOfApiKeysForUserWithManageOwnApiKeyRole;
List<CreateApiKeyResponse> allApiKeys = new ArrayList<>();
Stream.of(defaultUserCreatedKeys, userWithManageApiKeyRoleApiKeys, userWithManageOwnApiKeyRoleApiKeys).forEach(
allApiKeys::addAll);
verifyGetResponse(new String[]{SecuritySettingsSource.TEST_SUPERUSER, "user_with_manage_api_key_role",
"user_with_manage_own_api_key_role"}, totalApiKeys, allApiKeys, response,
allApiKeys.stream().map(o -> o.getId()).collect(Collectors.toSet()), null);
}
public void testGetAllApiKeysFailsForUserWithNoRoleOrRetrieveOwnApiKeyRole() throws InterruptedException, ExecutionException {
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageOwnApiKeyRole = randomIntBetween(3,7);
List<CreateApiKeyResponse> defaultUserCreatedKeys = createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys("user_with_manage_api_key_role",
noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
List<CreateApiKeyResponse> userWithManageOwnApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
noOfApiKeysForUserWithManageOwnApiKeyRole, null, "monitor");
final String withUser = randomFrom("user_with_manage_own_api_key_role", "user_with_no_api_key_role");
final Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(withUser, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(GetApiKeyAction.INSTANCE, new GetApiKeyRequest(), listener);
ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, () -> listener.actionGet());
assertErrorMessage(ese, "cluster:admin/xpack/security/api_key/get", withUser);
}
public void testInvalidateApiKeysOwnedByCurrentAuthenticatedUser() throws InterruptedException, ExecutionException {
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
List<CreateApiKeyResponse> defaultUserCreatedKeys = createApiKeys(noOfSuperuserApiKeys, null);
String userWithManageApiKeyRole = randomFrom("user_with_manage_api_key_role", "user_with_manage_own_api_key_role");
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys(userWithManageApiKeyRole,
noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
final Client client = client().filterWithHeader(Collections.singletonMap("Authorization", UsernamePasswordToken
.basicAuthHeaderValue(userWithManageApiKeyRole, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.forOwnedApiKeys(), listener);
InvalidateApiKeyResponse invalidateResponse = listener.get();
verifyInvalidateResponse(noOfApiKeysForUserWithManageApiKeyRole, userWithManageApiKeyRoleApiKeys, invalidateResponse);
}
public void testInvalidateApiKeysOwnedByRunAsUserWhenOwnerIsTrue() throws InterruptedException, ExecutionException {
createUserWithRunAsRole();
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
"user_with_run_as_role", noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
getClientForRunAsUser().execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.forOwnedApiKeys(), listener);
InvalidateApiKeyResponse invalidateResponse = listener.get();
verifyInvalidateResponse(noOfApiKeysForUserWithManageApiKeyRole, userWithManageApiKeyRoleApiKeys, invalidateResponse);
}
public void testInvalidateApiKeysOwnedByRunAsUserWhenRunAsUserInfoIsGiven() throws InterruptedException, ExecutionException {
createUserWithRunAsRole();
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
"user_with_run_as_role", noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
getClientForRunAsUser().execute(InvalidateApiKeyAction.INSTANCE,
InvalidateApiKeyRequest.usingRealmAndUserName("file", "user_with_manage_own_api_key_role"), listener);
InvalidateApiKeyResponse invalidateResponse = listener.get();
verifyInvalidateResponse(noOfApiKeysForUserWithManageApiKeyRole, userWithManageApiKeyRoleApiKeys, invalidateResponse);
}
public void testInvalidateApiKeysOwnedByRunAsUserWillNotWorkWhenAuthUserInfoIsGiven() throws InterruptedException, ExecutionException {
createUserWithRunAsRole();
int noOfSuperuserApiKeys = randomIntBetween(3, 5);
int noOfApiKeysForUserWithManageApiKeyRole = randomIntBetween(3, 5);
createApiKeys(noOfSuperuserApiKeys, null);
List<CreateApiKeyResponse> userWithManageApiKeyRoleApiKeys = createApiKeys("user_with_manage_own_api_key_role",
"user_with_run_as_role", noOfApiKeysForUserWithManageApiKeyRole, null, "monitor");
PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
final Tuple<String,String> invalidRealmAndUserPair = randomFrom(
new Tuple<>("file", "user_with_run_as_role"),
new Tuple<>("index", "user_with_manage_own_api_key_role"),
new Tuple<>("index", "user_with_run_as_role"));
getClientForRunAsUser().execute(InvalidateApiKeyAction.INSTANCE,
InvalidateApiKeyRequest.usingRealmAndUserName(invalidRealmAndUserPair.v1(), invalidRealmAndUserPair.v2()), listener);
final ElasticsearchSecurityException e = expectThrows(ElasticsearchSecurityException.class, listener::actionGet);
assertThat(e.getMessage(), containsString(
"unauthorized for user [user_with_run_as_role] run as [user_with_manage_own_api_key_role]"));
}
    /**
     * When authenticating WITH an API key (rather than as its owner), the key may retrieve
     * its own information but is denied access to any other key — even one created by the
     * same owner — and is also denied the broader "owned keys" listing.
     */
    public void testApiKeyAuthorizationApiKeyMustBeAbleToRetrieveItsOwnInformationButNotAnyOtherKeysCreatedBySameOwner()
        throws InterruptedException, ExecutionException {
        List<CreateApiKeyResponse> responses = createApiKeys(SecuritySettingsSource.TEST_SUPERUSER,2, null, (String[]) null);
        // authenticate as the FIRST key itself ("id:key" base64-encoded in the ApiKey scheme)
        final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString(
            (responses.get(0).getId() + ":" + responses.get(0).getKey().toString()).getBytes(StandardCharsets.UTF_8));
        Client client = client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKeyKeyValue));
        PlainActionFuture<GetApiKeyResponse> listener = new PlainActionFuture<>();
        // retrieving its own record must succeed regardless of the owner flag
        client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingApiKeyId(responses.get(0).getId(), randomBoolean()), listener);
        GetApiKeyResponse response = listener.get();
        verifyGetResponse(1, responses, response, Collections.singleton(responses.get(0).getId()), null);
        final PlainActionFuture<GetApiKeyResponse> failureListener = new PlainActionFuture<>();
        // for any other API key id, it must deny access
        client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.usingApiKeyId(responses.get(1).getId(), randomBoolean()),
            failureListener);
        ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, () -> failureListener.actionGet());
        // the error context names key 0 because that key is the authenticated subject
        assertErrorMessage(ese, "cluster:admin/xpack/security/api_key/get", SecuritySettingsSource.TEST_SUPERUSER,
            responses.get(0).getId());
        final PlainActionFuture<GetApiKeyResponse> failureListener1 = new PlainActionFuture<>();
        // listing "owned" keys is likewise denied for a plain API-key credential
        client.execute(GetApiKeyAction.INSTANCE, GetApiKeyRequest.forOwnedApiKeys(), failureListener1);
        ese = expectThrows(ElasticsearchSecurityException.class, () -> failureListener1.actionGet());
        assertErrorMessage(ese, "cluster:admin/xpack/security/api_key/get", SecuritySettingsSource.TEST_SUPERUSER,
            responses.get(0).getId());
    }
    /**
     * An API key granted manage_own_api_key may invalidate ITSELF, but not any other key —
     * even one created by the same owner — nor use the broader "owned keys" invalidation.
     * The denial cases are asserted first; self-invalidation runs last since it revokes the
     * credential used by this client.
     */
    public void testApiKeyWithManageOwnPrivilegeIsAbleToInvalidateItselfButNotAnyOtherKeysCreatedBySameOwner()
        throws InterruptedException, ExecutionException {
        List<CreateApiKeyResponse> responses = createApiKeys(SecuritySettingsSource.TEST_SUPERUSER, 2, null, "manage_own_api_key");
        // authenticate as the FIRST key itself ("id:key" base64-encoded in the ApiKey scheme)
        final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString(
            (responses.get(0).getId() + ":" + responses.get(0).getKey().toString()).getBytes(StandardCharsets.UTF_8));
        Client client = client().filterWithHeader(Map.of("Authorization", "ApiKey " + base64ApiKeyKeyValue));
        final PlainActionFuture<InvalidateApiKeyResponse> failureListener = new PlainActionFuture<>();
        // for any other API key id, it must deny access
        client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(responses.get(1).getId(), randomBoolean()),
            failureListener);
        ElasticsearchSecurityException ese = expectThrows(ElasticsearchSecurityException.class, () -> failureListener.actionGet());
        // the error context names key 0 because that key is the authenticated subject
        assertErrorMessage(ese, "cluster:admin/xpack/security/api_key/invalidate", SecuritySettingsSource.TEST_SUPERUSER,
            responses.get(0).getId());
        final PlainActionFuture<InvalidateApiKeyResponse> failureListener1 = new PlainActionFuture<>();
        // the bulk "owned keys" invalidation is likewise denied
        client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.forOwnedApiKeys(), failureListener1);
        ese = expectThrows(ElasticsearchSecurityException.class, () -> failureListener1.actionGet());
        assertErrorMessage(ese, "cluster:admin/xpack/security/api_key/invalidate", SecuritySettingsSource.TEST_SUPERUSER,
            responses.get(0).getId());
        PlainActionFuture<InvalidateApiKeyResponse> listener = new PlainActionFuture<>();
        // self-invalidation must succeed and report exactly the key's own id
        client.execute(InvalidateApiKeyAction.INSTANCE, InvalidateApiKeyRequest.usingApiKeyId(responses.get(0).getId(), randomBoolean()),
            listener);
        InvalidateApiKeyResponse invalidateResponse = listener.get();
        assertThat(invalidateResponse.getInvalidatedApiKeys().size(), equalTo(1));
        assertThat(invalidateResponse.getInvalidatedApiKeys(), containsInAnyOrder(responses.get(0).getId()));
        assertThat(invalidateResponse.getPreviouslyInvalidatedApiKeys().size(), equalTo(0));
        assertThat(invalidateResponse.getErrors().size(), equalTo(0));
    }
    /**
     * A key created FROM another API key (a "derived" key) is only permitted when the request
     * carries an explicit, empty role descriptor set — i.e. the derived key must not be able
     * to claim privileges. Every attempt to pass no descriptors, non-empty descriptors, or a
     * mix containing privileges must fail with the same message; a single descriptor with no
     * privileges succeeds.
     */
    public void testDerivedKeys() throws ExecutionException, InterruptedException {
        Client client = client().filterWithHeader(Collections.singletonMap("Authorization",
            UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
                SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
        // parent key with manage_api_key, so it is allowed to create further keys at all
        final CreateApiKeyResponse response = new CreateApiKeyRequestBuilder(client)
            .setName("key-1")
            .setRoleDescriptors(Collections.singletonList(
                new RoleDescriptor("role", new String[] { "manage_api_key" }, null, null)))
            .get();
        assertEquals("key-1", response.getName());
        assertNotNull(response.getId());
        assertNotNull(response.getKey());
        // use the first ApiKey for authorized action
        final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString(
            (response.getId() + ":" + response.getKey().toString()).getBytes(StandardCharsets.UTF_8));
        final Client clientKey1 = client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue));
        final String expectedMessage = "creating derived api keys requires an explicit role descriptor that is empty";
        // key-2: no role descriptors given at all -> rejected
        final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class,
            () -> new CreateApiKeyRequestBuilder(clientKey1).setName("key-2").get());
        assertThat(e1.getMessage(), containsString(expectedMessage));
        // key-3: an empty descriptor LIST (not an empty descriptor) -> rejected
        final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class,
            () -> new CreateApiKeyRequestBuilder(clientKey1).setName("key-3")
                .setRoleDescriptors(Collections.emptyList()).get());
        assertThat(e2.getMessage(), containsString(expectedMessage));
        // key-4: a descriptor carrying a cluster privilege -> rejected
        final IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class,
            () -> new CreateApiKeyRequestBuilder(clientKey1).setName("key-4")
                .setRoleDescriptors(Collections.singletonList(
                    new RoleDescriptor("role", new String[] {"manage_own_api_key"}, null, null)
                )).get());
        assertThat(e3.getMessage(), containsString(expectedMessage));
        // key-5: several empty descriptors with ONE privileged descriptor mixed in -> rejected
        final List<RoleDescriptor> roleDescriptors = randomList(2, 10,
            () -> new RoleDescriptor("role", null, null, null));
        roleDescriptors.set(randomInt(roleDescriptors.size() - 1),
            new RoleDescriptor("role", new String[] {"manage_own_api_key"}, null, null));
        final IllegalArgumentException e4 = expectThrows(IllegalArgumentException.class,
            () -> new CreateApiKeyRequestBuilder(clientKey1).setName("key-5")
                .setRoleDescriptors(roleDescriptors).get());
        assertThat(e4.getMessage(), containsString(expectedMessage));
        // key-100: exactly one descriptor with no privileges -> the only accepted form
        final CreateApiKeyResponse key100Response = new CreateApiKeyRequestBuilder(clientKey1).setName("key-100")
            .setRoleDescriptors(Collections.singletonList(
                new RoleDescriptor("role", null, null, null)
            )).get();
        assertEquals("key-100", key100Response.getName());
        assertNotNull(key100Response.getId());
        assertNotNull(key100Response.getKey());
        // Check at the end to allow sometime for the operation to happen. Since an erroneous creation is
        // asynchronous so that the document is not available immediately.
        assertApiKeyNotCreated(client, "key-2");
        assertApiKeyNotCreated(client, "key-3");
        assertApiKeyNotCreated(client, "key-4");
        assertApiKeyNotCreated(client, "key-5");
    }
public void testAuthenticationReturns429WhenThreadPoolIsSaturated() throws IOException, InterruptedException, ExecutionException {
final String nodeName = randomFrom(internalCluster().getNodeNames());
final Settings settings = internalCluster().getInstance(Settings.class, nodeName);
final int allocatedProcessors = EsExecutors.allocatedProcessors(settings);
final ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName);
final RoleDescriptor descriptor = new RoleDescriptor("auth_only", new String[] { }, null, null);
final Client client = client().filterWithHeader(Collections.singletonMap("Authorization",
UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
final CreateApiKeyResponse createApiKeyResponse = new CreateApiKeyRequestBuilder(client)
.setName("auth only key")
.setRoleDescriptors(Collections.singletonList(descriptor))
.get();
assertNotNull(createApiKeyResponse.getId());
assertNotNull(createApiKeyResponse.getKey());
final List<NodeInfo> nodeInfos = client().admin().cluster().prepareNodesInfo().get().getNodes().stream()
.filter(nodeInfo -> nodeInfo.getNode().getName().equals(nodeName))
.collect(Collectors.toList());
assertEquals(1, nodeInfos.size());
final ExecutorService executorService = threadPool.executor(SECURITY_CRYPTO_THREAD_POOL_NAME);
final int numberOfThreads = (allocatedProcessors + 1) / 2;
final CountDownLatch blockingLatch = new CountDownLatch(1);
final CountDownLatch readyLatch = new CountDownLatch(numberOfThreads);
for (int i = 0; i < numberOfThreads; i++) {
executorService.submit(() -> {
readyLatch.countDown();
try {
blockingLatch.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
}
// Make sure above tasks are running
readyLatch.await();
// Then fill the whole queue for the crypto thread pool
Future<?> lastTaskFuture = null;
int i = 0;
try {
for (i = 0; i < CRYPTO_THREAD_POOL_QUEUE_SIZE; i++) {
lastTaskFuture = executorService.submit(() -> { });
}
} catch (EsRejectedExecutionException e) {
logger.info("Attempted to push {} tasks but only pushed {}", CRYPTO_THREAD_POOL_QUEUE_SIZE, i + 1);
}
try (RestClient restClient = createRestClient(nodeInfos, null, "http")) {
final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString(
(createApiKeyResponse.getId() + ":" + createApiKeyResponse.getKey().toString()).getBytes(StandardCharsets.UTF_8));
final Request authRequest = new Request("GET", "_security/_authenticate");
authRequest.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader(
"Authorization", "ApiKey " + base64ApiKeyKeyValue).build());
final ResponseException responseException = expectThrows(ResponseException.class, () -> restClient.performRequest(authRequest));
assertThat(responseException.getMessage(), containsString("429 Too Many Requests"));
assertThat(responseException.getResponse().getStatusLine().getStatusCode(), is(429));
} finally {
blockingLatch.countDown();
if (lastTaskFuture != null) {
lastTaskFuture.get();
}
}
}
private void assertApiKeyNotCreated(Client client, String keyName) throws ExecutionException, InterruptedException {
new RefreshRequestBuilder(client, RefreshAction.INSTANCE).setIndices(SECURITY_MAIN_ALIAS).execute().get();
assertEquals(0, client.execute(GetApiKeyAction.INSTANCE,
GetApiKeyRequest.usingApiKeyName(keyName, false)).get().getApiKeyInfos().length);
}
private void verifyGetResponse(int expectedNumberOfApiKeys, List<CreateApiKeyResponse> responses,
GetApiKeyResponse response, Set<String> validApiKeyIds, List<String> invalidatedApiKeyIds) {
verifyGetResponse(SecuritySettingsSource.TEST_SUPERUSER, expectedNumberOfApiKeys, responses, response, validApiKeyIds,
invalidatedApiKeyIds);
}
private void verifyGetResponse(String user, int expectedNumberOfApiKeys, List<CreateApiKeyResponse> responses,
GetApiKeyResponse response, Set<String> validApiKeyIds, List<String> invalidatedApiKeyIds) {
verifyGetResponse(new String[]{user}, expectedNumberOfApiKeys, responses, response, validApiKeyIds, invalidatedApiKeyIds);
}
/**
 * Verifies a {@code GetApiKeyResponse} against the keys created during the test.
 *
 * @param user                   the expected owner usernames of the non-invalidated keys
 * @param expectedNumberOfApiKeys total number of key infos expected in the response
 * @param responses              the create-key responses produced by the test
 * @param response               the get-api-key response under verification
 * @param validApiKeyIds         ids expected to still be active (not invalidated)
 * @param invalidatedApiKeyIds   ids expected to be reported as invalidated, or {@code null} to skip that check
 */
private void verifyGetResponse(String[] user, int expectedNumberOfApiKeys, List<CreateApiKeyResponse> responses,
                               GetApiKeyResponse response, Set<String> validApiKeyIds, List<String> invalidatedApiKeyIds) {
    assertThat(response.getApiKeyInfos().length, equalTo(expectedNumberOfApiKeys));
    // Active keys reported by the response must match exactly the ids/names we expect to be valid.
    final List<String> idsExpected = responses.stream()
        .filter(created -> validApiKeyIds.contains(created.getId()))
        .map(CreateApiKeyResponse::getId)
        .collect(Collectors.toList());
    final List<String> idsReturned = Arrays.stream(response.getApiKeyInfos())
        .filter(info -> info.isInvalidated() == false)
        .map(info -> info.getId())
        .collect(Collectors.toList());
    assertThat(idsReturned, containsInAnyOrder(idsExpected.toArray(Strings.EMPTY_ARRAY)));
    final List<String> namesExpected = responses.stream()
        .filter(created -> validApiKeyIds.contains(created.getId()))
        .map(CreateApiKeyResponse::getName)
        .collect(Collectors.toList());
    final List<String> namesReturned = Arrays.stream(response.getApiKeyInfos())
        .filter(info -> info.isInvalidated() == false)
        .map(info -> info.getName())
        .collect(Collectors.toList());
    assertThat(namesReturned, containsInAnyOrder(namesExpected.toArray(Strings.EMPTY_ARRAY)));
    // Owner usernames of active keys: empty when no key is expected to be valid.
    final Set<String> usernamesExpected = validApiKeyIds.isEmpty() ? Collections.emptySet() : Set.of(user);
    final Set<String> usernamesReturned = Arrays.stream(response.getApiKeyInfos())
        .filter(info -> info.isInvalidated() == false)
        .map(info -> info.getUsername())
        .collect(Collectors.toSet());
    assertThat(usernamesReturned, containsInAnyOrder(usernamesExpected.toArray(Strings.EMPTY_ARRAY)));
    // Optionally check that exactly the expected ids are flagged as invalidated.
    if (invalidatedApiKeyIds != null) {
        final List<String> invalidatedReturned = Arrays.stream(response.getApiKeyInfos())
            .filter(info -> info.isInvalidated())
            .map(info -> info.getId())
            .collect(Collectors.toList());
        assertThat(invalidatedApiKeyIds, containsInAnyOrder(invalidatedReturned.toArray(Strings.EMPTY_ARRAY)));
    }
}
/**
 * Creates {@code noOfApiKeys} API keys as the test superuser with the "monitor" cluster privilege.
 */
private List<CreateApiKeyResponse> createApiKeys(int noOfApiKeys, TimeValue expiration) {
    return createApiKeys(SecuritySettingsSource.TEST_SUPERUSER, noOfApiKeys, expiration, "monitor");
}
/**
 * Creates API keys while authenticating as {@code user} via an HTTP basic-auth header.
 */
private List<CreateApiKeyResponse> createApiKeys(String user, int noOfApiKeys, TimeValue expiration, String... clusterPrivileges) {
    final String basicAuthHeader =
        UsernamePasswordToken.basicAuthHeaderValue(user, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING);
    return createApiKeys(Map.of("Authorization", basicAuthHeader), noOfApiKeys, expiration, clusterPrivileges);
}
/**
 * Creates API keys using run-as: authenticates as {@code authenticatingUser} but the keys are
 * owned by {@code owningUser} (via the {@code es-security-runas-user} header).
 */
private List<CreateApiKeyResponse> createApiKeys(String owningUser, String authenticatingUser,
                                                 int noOfApiKeys, TimeValue expiration, String... clusterPrivileges) {
    final Map<String, String> headers = Map.of("Authorization",
        UsernamePasswordToken.basicAuthHeaderValue(authenticatingUser, SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING),
        "es-security-runas-user", owningUser);
    return createApiKeys(headers, noOfApiKeys, expiration, clusterPrivileges);
}
/**
 * Creates {@code noOfApiKeys} API keys, each with a single role holding the given cluster
 * privileges, using a client decorated with the supplied auth headers. Asserts that every
 * create call returned both an id and a secret.
 */
private List<CreateApiKeyResponse> createApiKeys(Map<String, String> headers,
                                                 int noOfApiKeys, TimeValue expiration, String... clusterPrivileges) {
    final List<CreateApiKeyResponse> created = new ArrayList<>();
    for (int keyIndex = 0; keyIndex < noOfApiKeys; keyIndex++) {
        final RoleDescriptor roleDescriptor = new RoleDescriptor("role", clusterPrivileges, null, null);
        final Client authedClient = client().filterWithHeader(headers);
        // Random name suffix plus the loop index keeps key names unique within the test.
        final CreateApiKeyResponse createResponse = new CreateApiKeyRequestBuilder(authedClient)
            .setName("test-key-" + randomAlphaOfLengthBetween(5, 9) + keyIndex)
            .setExpiration(expiration)
            .setRoleDescriptors(Collections.singletonList(roleDescriptor))
            .get();
        assertNotNull(createResponse.getId());
        assertNotNull(createResponse.getKey());
        created.add(createResponse);
    }
    assertThat(created.size(), is(noOfApiKeys));
    return created;
}
/**
 * In order to have negative tests for realm name mismatch, user_with_run_as_role
 * needs to be created in a different realm other than file (which is handled by configureUsers()).
 * This new helper method creates the user in the native realm.
 *
 * @throws ExecutionException   if the put-user call fails
 * @throws InterruptedException if waiting for the put-user response is interrupted
 */
private void createUserWithRunAsRole() throws ExecutionException, InterruptedException {
    final PutUserRequest putUserRequest = new PutUserRequest();
    putUserRequest.username("user_with_run_as_role");
    putUserRequest.roles("run_as_role");
    putUserRequest.passwordHash(SecuritySettingsSource.TEST_PASSWORD_HASHED.toCharArray());
    PlainActionFuture<PutUserResponse> listener = new PlainActionFuture<>();
    // The put-user call itself must be authorized, so run it as the test superuser.
    final Client client = client().filterWithHeader(Map.of("Authorization",
        UsernamePasswordToken.basicAuthHeaderValue(SecuritySettingsSource.TEST_SUPERUSER,
            SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING)));
    client.execute(PutUserAction.INSTANCE, putUserRequest, listener);
    final PutUserResponse putUserResponse = listener.get();
    // created() == true means the user was newly created rather than updated.
    assertTrue(putUserResponse.created());
}
/**
 * Returns a client that authenticates as {@code user_with_run_as_role} and runs-as
 * {@code user_with_manage_own_api_key_role} via the {@code es-security-runas-user} header.
 */
private Client getClientForRunAsUser() {
    return client().filterWithHeader(Map.of("Authorization", UsernamePasswordToken
            .basicAuthHeaderValue("user_with_run_as_role", SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING),
        "es-security-runas-user", "user_with_manage_own_api_key_role"));
}
/**
 * Asserts the standard authorization-failure message for a request made with an API key.
 */
private void assertErrorMessage(final ElasticsearchSecurityException ese, String action, String userName, String apiKeyId) {
    final String expectedMessage =
        "action [" + action + "] is unauthorized for API key id [" + apiKeyId + "] of user [" + userName + "]";
    assertThat(ese.getMessage(), is(expectedMessage));
}
/**
 * Asserts the standard authorization-failure message for a request made by a regular user.
 */
private void assertErrorMessage(final ElasticsearchSecurityException ese, String action, String userName) {
    final String expectedMessage = "action [" + action + "] is unauthorized for user [" + userName + "]";
    assertThat(ese.getMessage(), is(expectedMessage));
}
}
|
package com.xpn.xwiki.doc;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.Set;
import org.apache.velocity.VelocityContext;
import org.jmock.Mock;
import org.jmock.core.Invocation;
import org.jmock.core.stub.CustomStub;
import org.xwiki.context.Execution;
import org.xwiki.model.reference.DocumentReference;
import org.xwiki.rendering.syntax.Syntax;
import org.xwiki.test.internal.MockConfigurationSource;
import org.xwiki.velocity.VelocityEngine;
import org.xwiki.velocity.VelocityManager;
import com.xpn.xwiki.XWiki;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
import com.xpn.xwiki.api.DocumentSection;
import com.xpn.xwiki.objects.BaseObject;
import com.xpn.xwiki.objects.classes.BaseClass;
import com.xpn.xwiki.objects.classes.TextAreaClass;
import com.xpn.xwiki.store.XWikiStoreInterface;
import com.xpn.xwiki.store.XWikiVersioningStoreInterface;
import com.xpn.xwiki.test.AbstractBridgedXWikiComponentTestCase;
import com.xpn.xwiki.user.api.XWikiRightService;
import com.xpn.xwiki.web.XWikiMessageTool;
/**
* Unit tests for {@link XWikiDocument}.
*
* @version $Id$
*/
public class XWikiDocumentTest extends AbstractBridgedXWikiComponentTestCase
{
    // Coordinates of the document under test: Wiki:Space.Page.
    private static final String DOCWIKI = "Wiki";
    private static final String DOCSPACE = "Space";
    private static final String DOCNAME = "Page";
    private static final String DOCFULLNAME = DOCSPACE + "." + DOCNAME;
    private static final DocumentReference DOCUMENT_REFERENCE = new DocumentReference(DOCWIKI, DOCSPACE, DOCNAME);
    // The document's XClass lives in the document itself, so the class shares its reference.
    private static final String CLASSNAME = DOCFULLNAME;
    private static final DocumentReference CLASS_REFERENCE = DOCUMENT_REFERENCE;
    // Main fixture document (xwiki/1.0 syntax, language "en") and its "fr" translation.
    private XWikiDocument document;
    private XWikiDocument translatedDocument;
    // JMock mocks for the wiki and its collaborators, wired up in setUp().
    private Mock mockXWiki;
    private Mock mockXWikiVersioningStore;
    private Mock mockXWikiStoreInterface;
    private Mock mockXWikiMessageTool;
    private Mock mockXWikiRightService;
    private Mock mockVelocityManager;
    private Mock mockVelocityEngine;
    // Stub that makes the mock Velocity engine echo its input unchanged.
    private CustomStub velocityEngineEvaluateStub;
    // XClass of the fixture document plus two identical objects attached to it.
    private BaseClass baseClass;
    private BaseObject baseObject;
    private BaseObject baseObject2;
@Override
protected void setUp() throws Exception
{
    super.setUp();

    // Fixture document: xwiki/1.0 syntax, default language "en", flagged as existing.
    this.document = new XWikiDocument(new DocumentReference(DOCWIKI, DOCSPACE, DOCNAME));
    this.document.setSyntax(Syntax.XWIKI_1_0);
    this.document.setLanguage("en");
    this.document.setDefaultLanguage("en");
    this.document.setNew(false);

    // French translation in xwiki/2.0 syntax, also flagged as existing.
    this.translatedDocument = new XWikiDocument();
    this.translatedDocument.setSyntax(Syntax.XWIKI_2_0);
    this.translatedDocument.setLanguage("fr");
    this.translatedDocument.setNew(false);

    getContext().put("isInRenderingEngine", true);

    // Mock the wiki and its stores; the versioning store returns no archive.
    this.mockXWiki = mock(XWiki.class);
    this.mockXWikiVersioningStore = mock(XWikiVersioningStoreInterface.class);
    this.mockXWikiVersioningStore.stubs().method("getXWikiDocumentArchive").will(returnValue(null));
    this.mockXWikiStoreInterface = mock(XWikiStoreInterface.class);
    this.document.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    // Message tool always answers "message"; rights service always grants programming rights.
    this.mockXWikiMessageTool =
        mock(XWikiMessageTool.class, new Class[] { ResourceBundle.class, XWikiContext.class }, new Object[] { null,
            getContext() });
    this.mockXWikiMessageTool.stubs().method("get").will(returnValue("message"));
    this.mockXWikiRightService = mock(XWikiRightService.class);
    this.mockXWikiRightService.stubs().method("hasProgrammingRights").will(returnValue(true));

    this.mockXWiki.stubs().method("getVersioningStore").will(returnValue(this.mockXWikiVersioningStore.proxy()));
    this.mockXWiki.stubs().method("getStore").will(returnValue(this.mockXWikiStoreInterface.proxy()));
    this.mockXWiki.stubs().method("getDocument").will(returnValue(this.document));
    this.mockXWiki.stubs().method("getLanguagePreference").will(returnValue("en"));
    this.mockXWiki.stubs().method("getSectionEditingDepth").will(returnValue(2L));
    this.mockXWiki.stubs().method("getRightService").will(returnValue(this.mockXWikiRightService.proxy()));
    this.mockXWiki.stubs().method("exists").will(returnValue(false));
    this.mockXWiki.stubs().method("evaluateTemplate").will(returnValue(""));

    getContext().setWiki((XWiki) this.mockXWiki.proxy());
    getContext().put("msg", this.mockXWikiMessageTool.proxy());

    // Build an XClass covering the common field types used by the display/convert tests.
    this.baseClass = this.document.getXClass();
    this.baseClass.addTextField("string", "String", 30);
    this.baseClass.addTextAreaField("area", "Area", 10, 10);
    this.baseClass.addTextAreaField("puretextarea", "Pure text area", 10, 10);
    // set the text areas an non interpreted content
    ((TextAreaClass) this.baseClass.getField("puretextarea")).setContentType("puretext");
    this.baseClass.addPasswordField("passwd", "Password", 30);
    this.baseClass.addBooleanField("boolean", "Boolean", "yesno");
    this.baseClass.addNumberField("int", "Int", 10, "integer");
    this.baseClass.addStaticListField("stringlist", "StringList", "value1, value2");

    this.mockXWiki.stubs().method("getClass").will(returnValue(this.baseClass));
    this.mockXWiki.stubs().method("getXClass").will(returnValue(this.baseClass));

    // Attach two identical objects of that class to the document.
    this.baseObject = this.document.newObject(CLASSNAME, getContext());
    this.baseObject.setStringValue("string", "string");
    this.baseObject.setLargeStringValue("area", "area");
    this.baseObject.setStringValue("passwd", "passwd");
    this.baseObject.setIntValue("boolean", 1);
    this.baseObject.setIntValue("int", 42);
    this.baseObject.setStringListValue("stringlist", Arrays.asList("VALUE1", "VALUE2"));
    this.baseObject2 = this.baseObject.clone();
    this.document.addXObject(this.baseObject2);

    this.mockXWikiStoreInterface.stubs().method("search").will(returnValue(new ArrayList<XWikiDocument>()));
}
@Override
protected void registerComponents() throws Exception
{
    super.registerComponents();

    // Mock xwiki.cfg
    getComponentManager().registerComponent(MockConfigurationSource.getDescriptor("xwikicfg"),
        getConfigurationSource());

    // Setup the mock Velocity engine.
    this.mockVelocityManager = registerMockComponent(VelocityManager.class);
    this.mockVelocityEngine = mock(VelocityEngine.class);
    this.mockVelocityManager.stubs().method("getVelocityContext").will(returnValue(null));
    this.mockVelocityManager.stubs().method("getVelocityEngine").will(returnValue(this.mockVelocityEngine.proxy()));
    // Stub "evaluate" to echo the template text unchanged so rendering tests see raw content.
    velocityEngineEvaluateStub = new CustomStub("Implements VelocityEngine.evaluate")
    {
        @Override
        public Object invoke(Invocation invocation) throws Throwable
        {
            // Output the given text without changes.
            StringWriter writer = (StringWriter) invocation.parameterValues.get(1);
            String text = (String) invocation.parameterValues.get(3);
            writer.append(text);
            return true;
        }
    };
    this.mockVelocityEngine.stubs().method("evaluate").will(velocityEngineEvaluateStub);
    this.mockVelocityEngine.stubs().method("startedUsingMacroNamespace");
    this.mockVelocityEngine.stubs().method("stoppedUsingMacroNamespace");
}
public void testGetUniqueLinkedPages10()
{
XWikiDocument contextDocument =
new XWikiDocument(new DocumentReference("contextdocwiki", "contextdocspace", "contextdocpage"));
getContext().setDoc(contextDocument);
this.mockXWiki.stubs().method("exists").will(returnValue(true));
this.document.setContent("[TargetPage][TargetLabel>TargetPage][TargetSpace.TargetPage]"
+ "[TargetLabel>TargetSpace.TargetPage?param=value
Set<String> linkedPages = this.document.getUniqueLinkedPages(getContext());
assertEquals(new HashSet<String>(Arrays.asList("TargetPage", "TargetSpace.TargetPage")), new HashSet<String>(
linkedPages));
}
public void testGetUniqueLinkedPages()
{
XWikiDocument contextDocument =
new XWikiDocument(new DocumentReference("contextdocwiki", "contextdocspace", "contextdocpage"));
getContext().setDoc(contextDocument);
this.document.setContent("[[TargetPage]][[TargetLabel>>TargetPage]][[TargetSpace.TargetPage]]"
+ "[[TargetLabel>>TargetSpace.TargetPage?param=value
+ "[[]][[#anchor]][[?param=value]][[targetwiki:TargetSpace.TargetPage]]");
this.document.setSyntax(Syntax.XWIKI_2_0);
Set<String> linkedPages = this.document.getUniqueLinkedPages(getContext());
assertEquals(
new LinkedHashSet<String>(Arrays.asList("Space.TargetPage.WebHome", "TargetSpace.TargetPage.WebHome",
"targetwiki:TargetSpace.TargetPage.WebHome")), linkedPages);
}
/**
 * Verifies section extraction for xwiki/1.0 content: "1"/"1.1" headings become sections
 * with character offsets into the raw content.
 */
public void testGetSections10() throws XWikiException
{
    this.document.setContent("content not in section\n" + "1 header 1\nheader 1 content\n"
        + "1.1 header 2\nheader 2 content");

    List<DocumentSection> headers = this.document.getSections();

    assertEquals(2, headers.size());

    DocumentSection header1 = headers.get(0);
    DocumentSection header2 = headers.get(1);

    assertEquals("header 1", header1.getSectionTitle());
    // In 1.0 syntax the section index is a character offset into the content.
    assertEquals(23, header1.getSectionIndex());
    assertEquals(1, header1.getSectionNumber());
    assertEquals("1", header1.getSectionLevel());
    assertEquals("header 2", header2.getSectionTitle());
    assertEquals(51, header2.getSectionIndex());
    assertEquals(2, header2.getSectionNumber());
    assertEquals("1.1", header2.getSectionLevel());
}
/**
 * Verifies section extraction for xwiki/2.0 content: "=" headings become sections;
 * unlike 1.0 syntax the section index is -1 (no character offsets for 2.0).
 */
public void testGetSections() throws XWikiException
{
    this.document.setContent("content not in section\n" + "= header 1=\nheader 1 content\n"
        + "== header 2==\nheader 2 content");
    this.document.setSyntax(Syntax.XWIKI_2_0);

    List<DocumentSection> headers = this.document.getSections();

    assertEquals(2, headers.size());

    DocumentSection header1 = headers.get(0);
    DocumentSection header2 = headers.get(1);

    assertEquals("header 1", header1.getSectionTitle());
    assertEquals(-1, header1.getSectionIndex());
    assertEquals(1, header1.getSectionNumber());
    assertEquals("1", header1.getSectionLevel());
    assertEquals("header 2", header2.getSectionTitle());
    assertEquals(-1, header2.getSectionIndex());
    assertEquals(2, header2.getSectionNumber());
    assertEquals("1.1", header2.getSectionLevel());
}
/**
 * Verifies {@code getDocumentSection(int)} lookup by section number for xwiki/1.0 content.
 */
public void testGetDocumentSection10() throws XWikiException
{
    this.document.setContent("content not in section\n" + "1 header 1\nheader 1 content\n"
        + "1.1 header 2\nheader 2 content");

    DocumentSection header1 = this.document.getDocumentSection(1);
    DocumentSection header2 = this.document.getDocumentSection(2);

    assertEquals("header 1", header1.getSectionTitle());
    // 1.0 syntax exposes the section's character offset in the content.
    assertEquals(23, header1.getSectionIndex());
    assertEquals(1, header1.getSectionNumber());
    assertEquals("1", header1.getSectionLevel());
    assertEquals("header 2", header2.getSectionTitle());
    assertEquals(51, header2.getSectionIndex());
    assertEquals(2, header2.getSectionNumber());
    assertEquals("1.1", header2.getSectionLevel());
}
/**
 * Verifies {@code getDocumentSection(int)} lookup by section number for xwiki/2.0 content
 * (section index is -1 since 2.0 sections carry no character offsets).
 */
public void testGetDocumentSection() throws XWikiException
{
    this.document.setContent("content not in section\n" + "= header 1=\nheader 1 content\n"
        + "== header 2==\nheader 2 content");
    this.document.setSyntax(Syntax.XWIKI_2_0);

    DocumentSection header1 = this.document.getDocumentSection(1);
    DocumentSection header2 = this.document.getDocumentSection(2);

    assertEquals("header 1", header1.getSectionTitle());
    assertEquals(-1, header1.getSectionIndex());
    assertEquals(1, header1.getSectionNumber());
    assertEquals("1", header1.getSectionLevel());
    assertEquals("header 2", header2.getSectionTitle());
    assertEquals(-1, header2.getSectionIndex());
    assertEquals(2, header2.getSectionNumber());
    assertEquals("1.1", header2.getSectionLevel());
}
/**
 * Verifies that headings nested inside groups ("(((...)))") are not counted as editable
 * sections: with Heading3/Heading4 inside groups, the third section is Heading5.
 */
public void testGetDocumentSectionWhenSectionInGroups() throws XWikiException
{
    this.document.setContent("= Heading1 =\n" + "para1\n" + "== Heading2 ==\n" + "para2\n" + "(((\n"
        + "== Heading3 ==\n" + "para3\n" + "(((\n" + "== Heading4 ==\n" + "para4\n" + ")))\n" + ")))\n"
        + "== Heading5 ==\n" + "para5\n");
    this.document.setSyntax(Syntax.XWIKI_2_0);
    DocumentSection section = this.document.getDocumentSection(3);
    assertEquals("Heading5", section.getSectionTitle());
}
/**
 * Verifies {@code getContentOfSection(int)} for xwiki/1.0 content: a top-level section
 * includes its nested subsections, while a subsection returns only its own content.
 */
public void testGetContentOfSection10() throws XWikiException
{
    this.document.setContent("content not in section\n" + "1 header 1\nheader 1 content\n"
        + "1.1 header 2\nheader 2 content");

    // Section 1 spans both headings.
    assertEquals("1 header 1\nheader 1 content\n1.1 header 2\nheader 2 content",
        this.document.getContentOfSection(1));
    // Section 2 covers only the nested "1.1" heading.
    assertEquals("1.1 header 2\nheader 2 content", this.document.getContentOfSection(2));
}
/**
 * Verifies {@code getContentOfSection(int)} for xwiki/2.0 content, including how the
 * configured section editing depth changes which headings count as editable sections.
 */
public void testGetContentOfSection() throws XWikiException
{
    this.document.setContent("content not in section\n" + "= header 1=\nheader 1 content\n"
        + "== header 2==\nheader 2 content\n" + "=== header 3===\nheader 3 content\n"
        + "== header 4==\nheader 4 content");
    this.document.setSyntax(Syntax.XWIKI_2_0);

    String content1 = this.document.getContentOfSection(1);
    String content2 = this.document.getContentOfSection(2);
    String content3 = this.document.getContentOfSection(3);

    // With editing depth 2 (from setUp), level-3 headings are not separate sections.
    assertEquals("= header 1 =\n\nheader 1 content\n\n== header 2 ==\n\nheader 2 content\n\n"
        + "=== header 3 ===\n\nheader 3 content\n\n== header 4 ==\n\nheader 4 content", content1);
    assertEquals("== header 2 ==\n\nheader 2 content\n\n=== header 3 ===\n\nheader 3 content", content2);
    assertEquals("== header 4 ==\n\nheader 4 content", content3);

    // Validate that third level header is not skipped anymore
    this.mockXWiki.stubs().method("getSectionEditingDepth").will(returnValue(3L));

    content3 = this.document.getContentOfSection(3);
    String content4 = this.document.getContentOfSection(4);

    assertEquals("=== header 3 ===\n\nheader 3 content", content3);
    assertEquals("== header 4 ==\n\nheader 4 content", content4);
}
/**
 * Exercises xwiki/1.0 section splitting edge cases: plain headings, commented-out headings,
 * headings with surrounding whitespace, headings deeper than the editing depth, and blank lines.
 */
public void testSectionSplit10() throws XWikiException
{
    List<DocumentSection> sections;
    // Simple test
    this.document.setContent("1 Section 1\n" + "Content of first section\n" + "1.1 Subsection 2\n"
        + "Content of second section\n" + "1 Section 3\n" + "Content of section 3");
    sections = this.document.getSections();
    assertEquals(3, sections.size());
    assertEquals("Section 1", sections.get(0).getSectionTitle());
    assertEquals("1 Section 1\n" + "Content of first section\n" + "1.1 Subsection 2\n"
        + "Content of second section\n", this.document.getContentOfSection(1));
    assertEquals("1.1", sections.get(1).getSectionLevel());
    assertEquals("1.1 Subsection 2\nContent of second section\n", this.document.getContentOfSection(2));
    assertEquals(3, sections.get(2).getSectionNumber());
    assertEquals(80, sections.get(2).getSectionIndex());
    assertEquals("1 Section 3\nContent of section 3", this.document.getContentOfSection(3));
    // Test comments don't break the section editing
    this.document.setContent("1 Section 1\n" + "Content of first section\n" + "## 1.1 Subsection 2\n"
        + "Content of second section\n" + "1 Section 3\n" + "Content of section 3");
    sections = this.document.getSections();
    assertEquals(2, sections.size());
    assertEquals("Section 1", sections.get(0).getSectionTitle());
    assertEquals("1", sections.get(1).getSectionLevel());
    assertEquals(2, sections.get(1).getSectionNumber());
    assertEquals(83, sections.get(1).getSectionIndex());
    // Test spaces are ignored
    this.document.setContent("1 Section 1\n" + "Content of first section\n" + " 1.1 Subsection 2 \n"
        + "Content of second section\n" + "1 Section 3\n" + "Content of section 3");
    sections = this.document.getSections();
    assertEquals(3, sections.size());
    // Note: the trailing space after the title is preserved.
    assertEquals("Subsection 2 ", sections.get(1).getSectionTitle());
    assertEquals("1.1", sections.get(1).getSectionLevel());
    // Test lower headings are ignored
    this.document.setContent("1 Section 1\n" + "Content of first section\n" + "1.1.1 Lower subsection\n"
        + "This content is not important\n" + " 1.1 Subsection 2 \n" + "Content of second section\n"
        + "1 Section 3\n" + "Content of section 3");
    sections = this.document.getSections();
    assertEquals(3, sections.size());
    assertEquals("Section 1", sections.get(0).getSectionTitle());
    assertEquals("Subsection 2 ", sections.get(1).getSectionTitle());
    assertEquals("1.1", sections.get(1).getSectionLevel());
    // Test blank lines are preserved
    this.document
        .setContent("\n\n1 Section 1\n\n\n" + "Content of first section\n\n\n" + " 1.1 Subsection 2 \n\n"
            + "Content of second section\n" + "1 Section 3\n" + "Content of section 3");
    sections = this.document.getSections();
    assertEquals(3, sections.size());
    assertEquals(2, sections.get(0).getSectionIndex());
    assertEquals("Subsection 2 ", sections.get(1).getSectionTitle());
    assertEquals(43, sections.get(1).getSectionIndex());
}
/**
 * Verifies {@code updateDocumentSection(int, String)} for xwiki/1.0 content: replacing one
 * section leaves the others (content, numbering, offsets) intact.
 */
public void testUpdateDocumentSection10() throws XWikiException
{
    List<DocumentSection> sections;
    // Fill the document
    this.document.setContent("1 Section 1\n" + "Content of first section\n" + "1.1 Subsection 2\n"
        + "Content of second section\n" + "1 Section 3\n" + "Content of section 3");
    String content = this.document.updateDocumentSection(3, "1 Section 3\n" + "Modified content of section 3");
    assertEquals("1 Section 1\n" + "Content of first section\n" + "1.1 Subsection 2\n"
        + "Content of second section\n" + "1 Section 3\n" + "Modified content of section 3", content);
    // Re-parse the updated content and verify section metadata survived the edit.
    this.document.setContent(content);
    sections = this.document.getSections();
    assertEquals(3, sections.size());
    assertEquals("Section 1", sections.get(0).getSectionTitle());
    assertEquals("1 Section 1\n" + "Content of first section\n" + "1.1 Subsection 2\n"
        + "Content of second section\n", this.document.getContentOfSection(1));
    assertEquals("1.1", sections.get(1).getSectionLevel());
    assertEquals("1.1 Subsection 2\nContent of second section\n", this.document.getContentOfSection(2));
    assertEquals(3, sections.get(2).getSectionNumber());
    assertEquals(80, sections.get(2).getSectionIndex());
    assertEquals("1 Section 3\nModified content of section 3", this.document.getContentOfSection(3));
}
/**
 * Verifies {@code updateDocumentSection(int, String)} for xwiki/2.0 content: modifying a
 * section, modifying a parent section, and removing a section via an empty replacement.
 */
public void testUpdateDocumentSection() throws XWikiException
{
    this.document.setContent("content not in section\n" + "= header 1=\nheader 1 content\n"
        + "== header 2==\nheader 2 content");
    this.document.setSyntax(Syntax.XWIKI_2_0);

    // Modify section content
    String content1 = this.document.updateDocumentSection(2, "== header 2==\nmodified header 2 content");

    assertEquals(
        "content not in section\n\n= header 1 =\n\nheader 1 content\n\n== header 2 ==\n\nmodified header 2 content",
        content1);

    String content2 =
        this.document.updateDocumentSection(1,
            "= header 1 =\n\nmodified also header 1 content\n\n== header 2 ==\n\nheader 2 content");

    assertEquals(
        "content not in section\n\n= header 1 =\n\nmodified also header 1 content\n\n== header 2 ==\n\nheader 2 content",
        content2);

    // Remove a section
    String content3 = this.document.updateDocumentSection(2, "");

    assertEquals("content not in section\n\n= header 1 =\n\nheader 1 content", content3);
}
/**
 * Verifies property display in a xwiki/2.0 rendering context: edit-mode fields and
 * rendered text areas are wrapped in {{html}} macros.
 */
public void testDisplay()
{
    this.mockXWiki.stubs().method("getCurrentContentSyntaxId").will(returnValue("xwiki/2.0"));

    this.document.setSyntax(Syntax.XWIKI_2_0);

    assertEquals("string", this.document.display("string", "view", getContext()));
    assertEquals(
        "{{html clean=\"false\" wiki=\"false\"}}<input size='30' id='Space.Page_0_string' value='string' name='Space.Page_0_string' type='text'/>{{/html}}",
        this.document.display("string", "edit", getContext()));

    assertEquals("{{html clean=\"false\" wiki=\"false\"}}<p>area</p>{{/html}}",
        this.document.display("area", "view", getContext()));
}
/**
 * Verifies property display when the context document uses xwiki/1.0 while the displayed
 * document is xwiki/2.0: edit fields are wrapped in {pre} markers, no {{html}} macro.
 */
public void testDisplay1020()
{
    this.mockXWiki.stubs().method("getCurrentContentSyntaxId").will(returnValue("xwiki/1.0"));

    XWikiDocument doc10 = new XWikiDocument();
    doc10.setSyntax(Syntax.XWIKI_1_0);
    getContext().setDoc(doc10);

    this.document.setSyntax(Syntax.XWIKI_2_0);

    assertEquals("string", this.document.display("string", "view", getContext()));
    assertEquals(
        "{pre}<input size='30' id='Space.Page_0_string' value='string' name='Space.Page_0_string' type='text'/>{/pre}",
        this.document.display("string", "edit", getContext()));

    assertEquals("<p>area</p>", this.document.display("area", "view", getContext()));
}
/**
 * Verifies property display outside the rendering engine ("isInRenderingEngine" false):
 * output is plain HTML without {{html}} or {pre} wrapping.
 */
public void testDisplayTemplate20()
{
    this.mockXWiki.stubs().method("getCurrentContentSyntaxId").will(returnValue("xwiki/2.0"));

    getContext().put("isInRenderingEngine", false);

    this.document.setSyntax(Syntax.XWIKI_2_0);

    assertEquals("string", this.document.display("string", "view", getContext()));
    assertEquals(
        "<input size='30' id='Space.Page_0_string' value='string' name='Space.Page_0_string' type='text'/>",
        this.document.display("string", "edit", getContext()));

    assertEquals("<p>area</p>", this.document.display("area", "view", getContext()));
}
/**
 * Verifies {@code convertSyntax}: HTML content and interpreted text areas are converted to
 * xwiki/2.0, while "puretext" text areas are left untouched.
 */
public void testConvertSyntax() throws XWikiException
{
    this.document.setSyntax(Syntax.HTML_4_01);
    this.document.setContent("<p>content not in section</p>"
        + "<h1>header 1</h1><p>header 1 content</p>"
        + "<h2>header 2</h2><p>header 2 content</p>");
    this.baseObject.setLargeStringValue("area", "<p>object content not in section</p>"
        + "<h1>object header 1</h1><p>object header 1 content</p>"
        + "<h2>object header 2</h2><p>object header 2 content</p>");
    this.baseObject.setLargeStringValue("puretextarea", "<p>object content not in section</p>"
        + "<h1>object header 1</h1><p>object header 1 content</p>"
        + "<h2>object header 2</h2><p>object header 2 content</p>");

    this.document.convertSyntax("xwiki/2.0", getContext());

    assertEquals("content not in section\n\n" + "= header 1 =\n\nheader 1 content\n\n"
        + "== header 2 ==\n\nheader 2 content", this.document.getContent());
    assertEquals("object content not in section\n\n" + "= object header 1 =\n\nobject header 1 content\n\n"
        + "== object header 2 ==\n\nobject header 2 content", this.baseObject.getStringValue("area"));
    // The pure-text area is not a rendered field, so its HTML must survive unconverted.
    assertEquals("<p>object content not in section</p>"
        + "<h1>object header 1</h1><p>object header 1 content</p>"
        + "<h2>object header 2</h2><p>object header 2 content</p>", this.baseObject.getStringValue("puretextarea"));
    assertEquals("xwiki/2.0", this.document.getSyntaxId());
}
/**
 * Verifies {@code getRenderedContent}: renders the document's own content, then — once the
 * language preference switches to French — renders the French translation instead.
 */
public void testGetRenderedContent() throws XWikiException
{
    this.document.setContent("**bold**");
    this.document.setSyntax(Syntax.XWIKI_2_0);

    assertEquals("<p><strong>bold</strong></p>", this.document.getRenderedContent(getContext()));

    // The translation uses a different syntax (1.0) to show rendering follows the translation.
    this.translatedDocument = new XWikiDocument(this.document.getDocumentReference(), Locale.FRENCH);
    this.translatedDocument.setContent("//italic//");
    this.translatedDocument.setSyntax(Syntax.XWIKI_1_0);
    this.translatedDocument.setNew(false);

    this.mockXWiki.stubs().method("getLanguagePreference").will(returnValue(Locale.FRENCH.toString()));
    // Return the translation when the document is looked up with the French locale.
    this.mockXWiki
        .stubs()
        .method("getDocument")
        .with(
            eq(new DocumentReference(this.translatedDocument.getDocumentReference(),
                this.translatedDocument.getLocale())), ANYTHING).will(returnValue(this.translatedDocument));

    assertEquals("<p><em>italic</em></p>", this.document.getRenderedContent(getContext()));
}
/**
 * Verifies that {@code getRenderedContent(String, String, XWikiContext)} uses the explicitly
 * passed source syntax (xwiki/2.0) rather than the document's own syntax (xwiki/1.0).
 */
public void testGetRenderedContentWithSourceSyntax()
{
    this.document.setSyntax(Syntax.XWIKI_1_0);

    assertEquals("<p><strong>bold</strong></p>",
        this.document.getRenderedContent("**bold**", "xwiki/2.0", getContext()));
}
/**
 * Verifies {@code rename}: links in the renamed document and in backlinked documents are
 * rewritten (relative to each document's wiki/space), and child documents get their parent
 * reference updated.
 */
public void testRename() throws XWikiException
{
    // Possible ways to write parents, include documents, or make links:
    // "database:space.name" (no change)
    this.document.setContent("[[doc:pageinsamespace]]");
    this.document.setSyntax(Syntax.XWIKI_2_1);
    DocumentReference targetReference = new DocumentReference("newwikiname", "newspace", "newpage");
    XWikiDocument targetDocument = this.document.duplicate(targetReference);
    targetDocument.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    // doc1: same wiki/space as the renamed document, links written three different ways.
    DocumentReference reference1 = new DocumentReference(DOCWIKI, DOCSPACE, "Page1");
    XWikiDocument doc1 = new XWikiDocument(reference1);
    doc1.setContent("[[doc:" + DOCWIKI + ":" + DOCSPACE + "." + DOCNAME + "]] [[someName>>doc:" + DOCSPACE + "."
        + DOCNAME + "]] [[doc:" + DOCNAME + "]]");
    doc1.setSyntax(Syntax.XWIKI_2_1);
    doc1.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    // doc2: same wiki as the rename target, different space.
    DocumentReference reference2 = new DocumentReference("newwikiname", DOCSPACE, "Page2");
    XWikiDocument doc2 = new XWikiDocument(reference2);
    doc2.setContent("[[doc:" + DOCWIKI + ":" + DOCSPACE + "." + DOCNAME + "]]");
    doc2.setSyntax(Syntax.XWIKI_2_1);
    doc2.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    // doc3: same wiki and space as the rename target.
    DocumentReference reference3 = new DocumentReference("newwikiname", "newspace", "Page3");
    XWikiDocument doc3 = new XWikiDocument(reference3);
    doc3.setContent("[[doc:" + DOCWIKI + ":" + DOCSPACE + "." + DOCNAME + "]]");
    doc3.setSyntax(Syntax.XWIKI_2_1);
    doc3.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    // Test to make sure it also drags children along.
    DocumentReference reference4 = new DocumentReference(DOCWIKI, DOCSPACE, "Page4");
    XWikiDocument doc4 = new XWikiDocument(reference4);
    doc4.setParent(DOCSPACE + "." + DOCNAME);
    doc4.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    DocumentReference reference5 = new DocumentReference("newwikiname", "newspace", "Page5");
    XWikiDocument doc5 = new XWikiDocument(reference5);
    doc5.setParent(DOCWIKI + ":" + DOCSPACE + "." + DOCNAME);
    doc5.setStore((XWikiStoreInterface) this.mockXWikiStoreInterface.proxy());

    this.mockXWiki.stubs().method("copyDocument").will(returnValue(true));
    this.mockXWiki.stubs().method("getDocument").with(eq(targetReference), ANYTHING)
        .will(returnValue(targetDocument));
    this.mockXWiki.stubs().method("getDocument").with(eq(reference1), ANYTHING).will(returnValue(doc1));
    this.mockXWiki.stubs().method("getDocument").with(eq(reference2), ANYTHING).will(returnValue(doc2));
    this.mockXWiki.stubs().method("getDocument").with(eq(reference3), ANYTHING).will(returnValue(doc3));
    this.mockXWiki.stubs().method("getDocument").with(eq(reference4), ANYTHING).will(returnValue(doc4));
    this.mockXWiki.stubs().method("getDocument").with(eq(reference5), ANYTHING).will(returnValue(doc5));
    this.mockXWiki.stubs().method("saveDocument").isVoid();
    this.mockXWiki.stubs().method("deleteDocument").isVoid();
    this.mockXWikiStoreInterface.stubs().method("getTranslationList").will(returnValue(Arrays.asList()));

    this.document.rename(new DocumentReference("newwikiname", "newspace", "newpage"),
        Arrays.asList(reference1, reference2, reference3), Arrays.asList(reference4, reference5), getContext());

    // Test links
    assertEquals("[[doc:Wiki:Space.pageinsamespace]]", this.document.getContent());
    assertEquals("[[doc:newwikiname:newspace.newpage]] " + "[[someName>>doc:newwikiname:newspace.newpage]] "
        + "[[doc:newwikiname:newspace.newpage]]", doc1.getContent());
    assertEquals("[[doc:newspace.newpage]]", doc2.getContent());
    assertEquals("[[doc:newpage]]", doc3.getContent());

    // Test parents
    assertEquals("newwikiname:newspace.newpage", doc4.getParent());
    assertEquals(new DocumentReference("newwikiname", "newspace", "newpage"), doc5.getParentReference());
}
/**
 * Validate rename does not crash when the document has 1.0 syntax (it does not support everything but it does not
 * crash).
 */
public void testRename10() throws XWikiException
{
    this.document.setContent("[pageinsamespace]");
    this.document.setSyntax(Syntax.XWIKI_1_0);
    DocumentReference targetReference = new DocumentReference("newwikiname", "newspace", "newpage");
    XWikiDocument targetDocument = this.document.duplicate(targetReference);

    this.mockXWiki.stubs().method("copyDocument").will(returnValue(true));
    this.mockXWiki.stubs().method("getDocument").with(eq(targetReference), ANYTHING)
        .will(returnValue(targetDocument));
    this.mockXWiki.stubs().method("saveDocument").isVoid();
    this.mockXWiki.stubs().method("deleteDocument").isVoid();

    this.document.rename(new DocumentReference("newwikiname", "newspace", "newpage"),
        Collections.<DocumentReference>emptyList(), Collections.<DocumentReference>emptyList(), getContext());

    // Test links: 1.0 syntax link rewriting is unsupported, so content is unchanged.
    assertEquals("[pageinsamespace]", this.document.getContent());
}
    /**
     * @see XWIKI-7515: 'getIncludedPages' in class com.xpn.xwiki.api.Document threw java.lang.NullPointerException
     */
    public void testGetIncludedPages()
    {
        this.document.setSyntax(Syntax.XWIKI_2_1);
        // Content without any include macro yields an empty list.
        this.document.setContent("no include");
        assertTrue(this.document.getIncludedPages(getContext()).isEmpty());
        // A malformed include macro (no reference parameter at all) must not blow up (XWIKI-7515).
        this.document.setContent("bad {{include/}}");
        assertTrue(this.document.getIncludedPages(getContext()).isEmpty());
        // The deprecated "document" parameter is still honored.
        this.document.setContent("good deprecated {{include document=\"Foo.Bar\"/}}");
        assertEquals(Arrays.asList("Foo.Bar"), this.document.getIncludedPages(getContext()));
        // The current "reference" parameter.
        this.document.setContent("good {{include reference=\"One.Two\"/}}");
        assertEquals(Arrays.asList("One.Two"), this.document.getIncludedPages(getContext()));
        // Self-references (empty, page-only and space.page forms of this document) are filtered out.
        this.document.setContent("bad recursive {{include reference=\"\"/}}");
        assertTrue(this.document.getIncludedPages(getContext()).isEmpty());
        this.document.setContent("bad recursive {{include reference=\"" + DOCNAME + "\"/}}");
        assertTrue(this.document.getIncludedPages(getContext()).isEmpty());
        this.document.setContent("bad recursive {{include reference=\"" + DOCSPACE + "." + DOCNAME + "\"/}}");
        assertTrue(this.document.getIncludedPages(getContext()).isEmpty());
    }
    /**
     * XWIKI-8025: XWikiDocument#backup/restoreContext doesn't update the reference to the Velocity context stored on
     * the XWiki context
     */
    public void testBackupRestoreContextUpdatesVContext() throws Exception
    {
        final Execution execution = getComponentManager().getInstance(Execution.class);
        // Stub getVelocityContext() to mimic the real VelocityManager: read the Velocity context
        // from the execution context and mirror it into the XWiki context under "vcontext".
        this.mockVelocityManager.stubs().method("getVelocityContext")
            .will(new CustomStub("Implements VelocityManager.getVelocityContext")
            {
                @Override
                public Object invoke(Invocation invocation) throws Throwable
                {
                    VelocityContext velocityContext =
                        (VelocityContext) execution.getContext().getProperty("velocityContext");
                    // See DefaultVelocityManagerTest#testGetVelocityContextUpdatesXContext()
                    getContext().put("vcontext", velocityContext);
                    return velocityContext;
                }
            });
        VelocityContext oldVelocityContext = new VelocityContext();
        execution.getContext().setProperty("velocityContext", oldVelocityContext);
        Map<String, Object> backup = new HashMap<String, Object>();
        XWikiDocument.backupContext(backup, getContext());
        // Backup must install a brand-new Velocity context and expose it as "vcontext" too.
        VelocityContext newVelocityContext = (VelocityContext) execution.getContext().getProperty("velocityContext");
        assertNotNull(newVelocityContext);
        assertNotSame(oldVelocityContext, newVelocityContext);
        assertSame(newVelocityContext, getContext().get("vcontext"));
        // Restore must bring the original Velocity context back in both places.
        XWikiDocument.restoreContext(backup, getContext());
        assertSame(oldVelocityContext, execution.getContext().getProperty("velocityContext"));
        assertSame(oldVelocityContext, getContext().get("vcontext"));
    }
}
|
package com.yahoo.vespa.zookeeper;
import com.google.inject.Inject;
import com.yahoo.cloud.config.ZookeeperServerConfig;
import com.yahoo.component.AbstractComponent;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.admin.ZooKeeperAdmin;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
* Starts zookeeper server and supports reconfiguring zookeeper cluster. Created as a component
* without any config injected, to make sure that it is not recreated when config changes.
*
* @author hmusum
*/
/**
 * Starts zookeeper server and supports reconfiguring zookeeper cluster. Created as a component
 * without any config injected, to make sure that it is not recreated when config changes.
 *
 * @author hmusum
 */
public class Reconfigurer extends AbstractComponent {

    private static final Logger log = Logger.getLogger(Reconfigurer.class.getName());

    private static final Duration sessionTimeout = Duration.ofSeconds(30);
    private static final Duration retryReconfigurationPeriod = Duration.ofSeconds(30);
    private static final Duration timeBetweenRetries = Duration.ofSeconds(1);

    private ZooKeeperRunner zooKeeperRunner;
    private ZookeeperServerConfig activeConfig;

    @Inject
    public Reconfigurer() {
        log.log(Level.FINE, "Created ZooKeeperReconfigurer");
    }

    /** Starts the ZooKeeper server on first call; reconfigures the cluster on later config changes. */
    void startOrReconfigure(ZookeeperServerConfig newConfig) {
        startOrReconfigure(newConfig, Reconfigurer::defaultSleeper);
    }

    /**
     * Same as {@link #startOrReconfigure(ZookeeperServerConfig)}, with an injectable sleeper
     * so tests can avoid real waits between reconfiguration retries.
     */
    void startOrReconfigure(ZookeeperServerConfig newConfig, Consumer<Duration> sleeper) {
        if (zooKeeperRunner == null)
            zooKeeperRunner = startServer(newConfig);
        if (shouldReconfigure(newConfig))
            reconfigure(newConfig, sleeper);
    }

    ZookeeperServerConfig activeConfig() {
        return activeConfig;
    }

    /**
     * Applies the given joining/leaving server lists to the running ensemble.
     *
     * @param connectionSpec comma-separated host:port list of current servers
     * @param joiningServers servers to add, in ZooKeeper reconfig format, or null for none
     * @param leavingServers server ids to remove, comma-separated, or null for none
     * @throws KeeperException if ZooKeeper rejects the reconfiguration (e.g. one is in progress)
     */
    void zooKeeperReconfigure(String connectionSpec, String joiningServers, String leavingServers) throws KeeperException {
        ZooKeeperAdmin zooKeeperAdmin = null;
        try {
            zooKeeperAdmin = new ZooKeeperAdmin(connectionSpec,
                                                (int) sessionTimeout.toMillis(),
                                                new LoggingWatcher());
            long fromConfig = -1; // -1: apply regardless of the current config version
            // Using string parameters because the List variant of reconfigure fails to join empty lists (observed on 3.5.6, fixed in 3.7.0)
            byte[] appliedConfig = zooKeeperAdmin.reconfigure(joiningServers, leavingServers, null, fromConfig, null);
            log.log(Level.INFO, "Applied ZooKeeper config: " + new String(appliedConfig, StandardCharsets.UTF_8));
        } catch (IOException e) {
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // preserve the interrupt status for callers
            throw new RuntimeException(e);
        } finally {
            // Close the admin client so each reconfiguration does not leak a ZooKeeper session.
            if (zooKeeperAdmin != null) {
                try {
                    zooKeeperAdmin.close();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    void shutdown() {
        if (zooKeeperRunner != null) {
            zooKeeperRunner.shutdown();
        }
    }

    /** Reconfigure only when enabled in config, a server is already running, and the config changed. */
    private boolean shouldReconfigure(ZookeeperServerConfig newConfig) {
        if (!newConfig.dynamicReconfiguration()) return false;
        if (activeConfig == null) return false;
        return !newConfig.equals(activeConfig());
    }

    private ZooKeeperRunner startServer(ZookeeperServerConfig zookeeperServerConfig) {
        ZooKeeperRunner runner = new ZooKeeperRunner(zookeeperServerConfig);
        activeConfig = zookeeperServerConfig;
        return runner;
    }

    /**
     * Computes joining/leaving servers from the config diff and retries the reconfig call for up to
     * {@link #retryReconfigurationPeriod}, since another reconfiguration may be in progress.
     */
    private void reconfigure(ZookeeperServerConfig newConfig, Consumer<Duration> sleeper) {
        Instant reconfigTriggered = Instant.now();
        // Leaving servers are identified by id only; joining servers need the full spec line.
        String leavingServers = String.join(",", difference(serverIds(activeConfig), serverIds(newConfig)));
        String joiningServers = String.join(",", difference(servers(newConfig), servers(activeConfig)));
        leavingServers = leavingServers.isEmpty() ? null : leavingServers;
        joiningServers = joiningServers.isEmpty() ? null : joiningServers;
        log.log(Level.INFO, "Will reconfigure ZooKeeper cluster. Joining servers: " + joiningServers +
                            ", leaving servers: " + leavingServers);
        String connectionSpec = connectionSpec(activeConfig);
        boolean reconfigured = false;
        Instant end = Instant.now().plus(retryReconfigurationPeriod);
        // Loop reconfiguring since we might need to wait until another reconfiguration is finished before we can succeed
        for (int attempts = 1; ! reconfigured && Instant.now().isBefore(end); attempts++) {
            try {
                Instant reconfigStarted = Instant.now();
                zooKeeperReconfigure(connectionSpec, joiningServers, leavingServers);
                Instant reconfigEnded = Instant.now();
                log.log(Level.INFO, "Reconfiguration completed in " +
                                    Duration.between(reconfigTriggered, reconfigEnded) +
                                    ", after " + attempts + " attempt(s). ZooKeeper reconfig call took " +
                                    Duration.between(reconfigStarted, reconfigEnded));
                reconfigured = true;
            } catch (KeeperException e) {
                // Only a colliding reconfiguration is retryable; anything else is fatal.
                if ( ! (e instanceof KeeperException.ReconfigInProgress))
                    throw new RuntimeException(e);
                log.log(Level.INFO, "Reconfiguration failed due to colliding with another reconfig. Retrying in " +
                                    timeBetweenRetries);
                sleeper.accept(timeBetweenRetries);
            }
        }
        activeConfig = newConfig;
    }

    /** Comma-separated host:clientPort list for connecting to the current ensemble. */
    private static String connectionSpec(ZookeeperServerConfig config) {
        return config.server().stream()
                     .map(server -> server.hostname() + ":" + config.clientPort())
                     .collect(Collectors.joining(","));
    }

    private static List<String> serverIds(ZookeeperServerConfig config) {
        return config.server().stream()
                     .map(ZookeeperServerConfig.Server::id)
                     .map(String::valueOf)
                     .collect(Collectors.toList());
    }

    private static List<String> servers(ZookeeperServerConfig config) {
        // See https://zookeeper.apache.org/doc/r3.5.8/zookeeperReconfig.html#sc_reconfig_clientport for format
        return config.server().stream()
                     .map(server -> server.id() + "=" + server.hostname() + ":" + server.quorumPort() + ":" +
                                    server.electionPort() + ";" + config.clientPort())
                     .collect(Collectors.toList());
    }

    /** Elements of list1 not present in list2 (list1 is not modified). */
    private static <T> List<T> difference(List<T> list1, List<T> list2) {
        List<T> copy = new ArrayList<>(list1);
        copy.removeAll(list2);
        return copy;
    }

    private static void defaultSleeper(Duration duration) {
        try {
            Thread.sleep(duration.toMillis());
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it; the retry loop will exit on timeout.
            Thread.currentThread().interrupt();
        }
    }

    private static class LoggingWatcher implements Watcher {
        @Override
        public void process(WatchedEvent event) {
            log.log(Level.INFO, event.toString());
        }
    }

}
|
package pal;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.StringTokenizer;
public class Main
{
static String newline = System.getProperty("line.separator");
static int BIG_ENOUGHT_LINES = 10000;
static int BIG_ENOUGHT_NEIGHBOURS = 100;
static BufferedReader bi = new BufferedReader(new InputStreamReader(System.in));
static StringBuilder buffer = new StringBuilder();
static HashMap<String,Node> cache = new HashMap<String,Node>();
static String[] line_map = new String[BIG_ENOUGHT_LINES];
static String[] relation = new String[BIG_ENOUGHT_LINES];
static int LINES_RED = 0;
static String name = "";
static int current = 0;
static Queue<Node> q = new LinkedList<Node>();
static String FIRST = "";
public static void main(String[] args) throws IOException
{
buffer.setLength(0);
cache.clear();
q.clear();
LINES_RED = 0;
FIRST = "";
name = "";
current = 0;
bi = new BufferedReader(new InputStreamReader(System.in));
try
{
slurp();
Node root = cache.get(FIRST);
// unlinked();
traverse(root);
// circle(root);
out();
}
catch(FatalError e){ System.out.println("ERROR"); }
}
private static void unlinked()
{
for(Node n : cache.values())
if(n.isDependency && !n.isDeclared) throw new FatalError();
}
private static Node node(String key)
{
key = key.trim();
Node n = cache.get(key);
if(n==null)
{
n = new Node(key);
cache.put(key, n);
}
return n;
}
static StringTokenizer tokenizer;
private static void readLine(String line) throws FatalError
{
if(line.trim().isEmpty()) return;
line_map[LINES_RED++] = line;
if (line.startsWith("\t"))
{
relation[LINES_RED-1] = name;
}
else
{
tokenizer = new StringTokenizer(line);
try
{ name = tokenizer.nextToken().replace(":", ""); }
catch(NoSuchElementException e)
{ return; }
if(FIRST.equalsIgnoreCase("")) FIRST=name;
Node n = null;
if(!name.equals(FIRST) && !cache.containsKey(name))
{
n = node(name);
n.dead = true;
}
else n = node(name);
n.isDeclared=true;
while(tokenizer.hasMoreTokens())
{
Node m = node(tokenizer.nextToken());
n.addEdge(m);
m.isDependency = true;
}
relation[LINES_RED-1]= name;
}
}
public static void slurp() throws FatalError,IOException
{
String line="";
while ((line = bi.readLine()) != null)
{ readLine(line); }
}
static void out() throws FatalError
{
buffer.setLength(0);
Node n = null;
for(int i=0; i<LINES_RED; i++)
{
n = cache.get(relation[i]);
if(n==null) continue;
if(n.dead && n.visited) throw new FatalError();
if(n.visited) buffer.append(line_map[i]+newline);
else buffer.append("#"+line_map[i]+newline);
}
System.out.print(buffer);
}
public static void circle(Node node) throws FatalError
{
for(int i=0; i<node.index; i++)
{
Node n = node.link[i];
n.marked = true;
if(circle(n,node))
{
n.marked = false;
throw new FatalError();
}
n.marked = false;
}
}
public static boolean circle(Node node, Node origin)
{
for(int i=0; i<node.index; i++)
{
Node n = node.link[i];
if(n==null) continue;
if(origin.marked && n.marked) return true;
if(node.marked && node.visited)
{
n.marked=true;
if(circle(n,node))
{
n.marked=false;
return true;
}
n.marked = false;
}
}
return false;
}
public static void traverse(Node node) throws FatalError
{
q.clear();
q.add(node);
node.visited = true;
while(!q.isEmpty())
{
Node n = (Node)q.poll();
for(int i=0; i<n.index; i++)
{
Node adj = n.link[i];
if(!adj.visited)
{
adj.visited = true;
q.add(adj);
}
}
}
}
static class FatalError extends RuntimeException
{ private static final long serialVersionUID = 3638938829930139263L; }
static class Node
{
String data = "";
boolean visited = false;
boolean marked = false;
boolean dead = false;
boolean isDependency = false;
boolean isDeclared = false;
private int index = 0;
Node[] link = new Node[BIG_ENOUGHT_NEIGHBOURS];
public Node(String data)
{ this.data = data; }
public void addEdge(Node b)
{ link[index++]=b; }
public java.lang.String toString()
{ return data.toString()+"["+(visited?"X":" ")+"]"; }
public boolean equals(Object another)
{ return ((Node)another).data == this.data; }
}
}
|
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.json.JSONException;
import org.json.JSONObject;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
public class Main extends HttpServlet {
private static String TABLE_CREATION = "CREATE TABLE IF NOT EXISTS monster " +
"(pad_ID varchar(9), name varchar(64), level int, " +
"skill_level int, awakenings int, plus_eggs int, " +
"CONSTRAINT uniqueMonster UNIQUE (pad_ID, name))";
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
Connection connection = null;
try {
connection = getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate(TABLE_CREATION);
}
catch (Exception e) {
resp.setStatus(500);
resp.getWriter().print("Table creation error: " + e.getMessage());
}
try {
String req_string = req.getRequestURI();
String[] split_req = req_string.split("/");
if (split_req[1].equals("getMonsters")) {
resp.setStatus(200);
printMonsterList.printMonsterList(resp);
}
else if (split_req[1].equals("changeID")) {
changeID.changeID(connection, resp, split_req[2], split_req[3]);
}
else if (split_req[1].equals("topLeaders")) {
topLeaders.topLeaders(connection, resp);
}
else if (split_req.length < 3) {
resp.setStatus(404);
} else if (split_req[1].equals("monsters")) {
getMonsterBox.getMonsterBox(connection, resp, split_req[2]);
} else {
resp.setStatus(404);
}
}
catch (JSONException e1) {
resp.setStatus(500);
resp.getWriter().print("Failed to parse body JSON: " + getStackTrace(e1));
}
catch (IOException e) {
resp.setStatus(500);
resp.getWriter().print("Failed to prepare SQL statement: " + getStackTrace(e));
}
finally {
try {
connection.close();
}
catch (SQLException e) {
resp.getWriter().print("Failed to close connection: " + getStackTrace(e));
}
}
}
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException
{
Connection connection = null;
try {
connection = getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate(TABLE_CREATION);
}
catch (Exception e) {
resp.setStatus(500);
resp.getWriter().print("Table creation error: " + e.getMessage());
}
resp.addHeader("Access-Control-Allow-Origin", "http://localhost:5000");
StringBuilder jb = new StringBuilder();
String line;
try {
BufferedReader reader = req.getReader();
while ((line = reader.readLine()) != null)
jb.append(line);
}
catch (IOException e) {
resp.setStatus(400);
resp.getWriter().print("Couldn't read in request body: " + getStackTrace(e));
}
try {
JSONObject jsonObject = new JSONObject(jb.toString());
if (req.getRequestURI().endsWith("/fetch")) {
fetchIDs.fetchIDs(connection, resp, jsonObject);
}
else if (req.getRequestURI().endsWith("/update")) {
updateMonster.updateMonster(connection, resp, jsonObject);
}
else if (req.getRequestURI().endsWith("/delete")) {
deleteMonster.delete_monster(connection, resp, jsonObject);
}
else {
resp.setStatus(404);
}
}
catch (JSONException e1) {
resp.setStatus(400);
resp.getWriter().print("Invalid JSON: " + jb.toString());
}
catch (IOException e) {
resp.setStatus(500);
resp.getWriter().print("{}");
}
finally {
try {
connection.close();
}
catch (SQLException e) {
resp.getWriter().print("Failed to close connection: " + getStackTrace(e));
}
}
}
private static Connection getConnection() throws URISyntaxException, SQLException
{
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + dbUri.getPath();
return DriverManager.getConnection(dbUrl, username, password);
}
public static void main(String[] args) throws Exception {
Server server = new Server(Integer.valueOf(System.getenv("PORT")));
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
server.setHandler(context);
|
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.*;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.*;
import static javax.measure.unit.SI.KILOGRAM;
import javax.measure.quantity.Mass;
import org.jscience.physics.model.RelativisticModel;
import org.jscience.physics.amount.Amount;
public class Main extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
if (req.getRequestURI().endsWith("/db")) {
showDatabase(req,resp);
} else {
showHome(req,resp);
}
}
private void showHome(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
// Energy is compatible with mass (E=mc2)
// Energy is compatible with mass (E=mc2)
RelativisticModel.select();
String energy = System.getenv().get("ENERGY");
Amount<Mass> m = Amount.valueOf(energy).to(KILOGRAM);
resp.getWriter().print("E=mc^2: " + energy + " = " + m);
}
private void showDatabase(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
try {
Connection connection = getConnection();
Statement stmt = connection.createStatement();
stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
String out = "Hello!\n";
while (rs.next()) {
out += "Read from DB: " + rs.getTimestamp("tick") + "\n";
}
resp.getWriter().print(out);
} catch (Exception e) {
resp.getWriter().print("There was an error: " + e.getMessage());
}
}
private Connection getConnection() throws URISyntaxException, SQLException {
URI dbUri = new URI(System.getenv("DATABASE_URL"));
String username = dbUri.getUserInfo().split(":")[0];
String password = dbUri.getUserInfo().split(":")[1];
String dbUrl = "jdbc:postgresql://" + dbUri.getHost() + dbUri.getPath();
return DriverManager.getConnection(dbUrl, username, password);
}
public static void main(String[] args) throws Exception{
Server server = new Server(Integer.valueOf(System.getenv("PORT")));
ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
context.setContextPath("/");
server.setHandler(context);
|
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Entry point and entropy helpers for a simple decision-tree classifier.
 * Data rows are lists of strings; column positions come from FileUtils.header2IndexMap.
 */
public class Main {

    public static void main(String[] args) throws Exception {
        // Train data is read up front (kept even though the stub tree below does not
        // consume it yet — readTrainData may populate FileUtils state; TODO confirm).
        List<List<String>> transData = FileUtils.readTrainData();
        // build tree
        TreeNode root = buildTree();
        List<List<String>> testData = FileUtils.readTestData();
        for (List<String> data : testData) {
            String label = TreeNode.getLabel(data, root);
            System.out.println(String.format("label is %s", label));
        }
    }

    /** Placeholder: returns an empty tree root until tree building is implemented. */
    static TreeNode buildTree() {
        return new TreeNode();
    }

    /**
     * Empirical entropy H(D) = -sum(p_i * log2(p_i)) over the column mapped by the
     * empty-string header (presumably the label column — verify against FileUtils).
     */
    static Double calculateExperienceEntropy(List<List<String>> data) {
        Map<String, Double> entropyMap = calculateEntropyMap(data, "");
        double sum = 0.0; // double literal; the original used a float literal (0.0f) needlessly
        for (Double entropy : entropyMap.values()) {
            sum += entropy;
        }
        return -sum;
    }

    /** Per-value term p * log2(p) for the given column; the caller negates the sum. */
    static private Map<String, Double> calculateEntropyMap(List<List<String>> data, String header) {
        Map<String, Integer> countMap = groupAndCount(data, header);
        Map<String, Double> entropyMap = new HashMap<>();
        for (Map.Entry<String, Integer> entry : countMap.entrySet()) {
            Double probability = entry.getValue() * 1.0 / data.size();
            Double entropy = probability * (Math.log(probability) / Math.log(2));
            entropyMap.put(entry.getKey(), entropy);
        }
        return entropyMap;
    }

    /** Counts occurrences of each distinct value in the column named by header. */
    static Map<String, Integer> groupAndCount(List<List<String>> data, String header) {
        Integer index = FileUtils.header2IndexMap.get(header);
        Map<String, Integer> res = new HashMap<>();
        for (List<String> row : data) {
            // Map.merge replaces the contains/get/put dance and avoids double lookups.
            res.merge(row.get(index), 1, Integer::sum);
        }
        return res;
    }

    /** Groups rows by the value in the label column (empty-string header). */
    static Map<String, List<List<String>>> groupByLabelValue(List<List<String>> data) {
        Integer index = FileUtils.header2IndexMap.get("");
        Map<String, List<List<String>>> res = new HashMap<>();
        for (List<String> row : data) {
            res.computeIfAbsent(row.get(index), k -> new ArrayList<>()).add(row);
        }
        return res;
    }
}
|
import com.google.gson.Gson;
import java.sql.*;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.Map;
import java.net.URI;
import java.net.URISyntaxException;
import static spark.Spark.*;
import spark.template.freemarker.FreeMarkerEngine;
import spark.ModelAndView;
import static spark.Spark.get;
import com.heroku.sdk.jdbc.DatabaseUrl;
/**
 * Spark web application: a static home page with topic lists, a JSON user lookup
 * endpoint, and a demo database page backed by a Heroku Postgres ticks table.
 */
public class Main {

    public static void main(String[] args) {
        port(Integer.valueOf(System.getenv("PORT")));
        staticFileLocation("/public");

        get("/hello", (req, res) -> "Hello World");

        // Renders the home page with three hard-coded topic lists.
        get("/test", (req, res) -> {
            ArrayList<String> oddtopics = new ArrayList<String>();
            oddtopics.add("Animal");
            oddtopics.add("Beauty");
            oddtopics.add("Books");
            oddtopics.add("Television");
            ArrayList<String> eventopics = new ArrayList<String>();
            eventopics.add("Culture");
            eventopics.add("Music");
            eventopics.add("Technology");
            ArrayList<String> weektopics = new ArrayList<String>();
            weektopics.add("Cooking");
            weektopics.add("Movies");
            weektopics.add("Sports");
            weektopics.add("Travel");
            Map<String, Object> attributes = new HashMap<>();
            attributes.put("oddtopics", oddtopics);
            attributes.put("eventopics", eventopics);
            attributes.put("weektopics", weektopics);
            return new ModelAndView(attributes, "index.ftl");
        }, new FreeMarkerEngine());

        Gson gson = new Gson();

        // Returns every user with their native/programming languages as JSON, keyed by username.
        // Fix: Spark route paths must begin with '/'; the original "api/find" never matched any request.
        get("/api/find", (req, res) -> {
            Connection connection = null;
            Map<String, Object> attributes = new HashMap<>();
            try {
                connection = DatabaseUrl.extract().getConnection();
                Statement stmt = connection.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT username, nlanguage, planguage FROM users");
                Map<String, Object> data = new HashMap<>();
                while (rs.next()) {
                    Map<String, Object> member = new HashMap<>();
                    member.put("username", rs.getString("username"));
                    member.put("nlanguage", rs.getString("nlanguage"));
                    member.put("planguage", rs.getString("planguage"));
                    data.put(rs.getString("username"), member);
                }
                return data;
            } catch (Exception e) {
                attributes.put("message", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            } finally {
                // Best-effort close; nothing useful to report if closing fails.
                if (connection != null) try { connection.close(); } catch (SQLException ignored) { }
            }
        }, gson::toJson);

        // Demo endpoint: inserts the current timestamp and renders every row of the ticks table.
        get("/db", (req, res) -> {
            Connection connection = null;
            Map<String, Object> attributes = new HashMap<>();
            try {
                connection = DatabaseUrl.extract().getConnection();
                Statement stmt = connection.createStatement();
                stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
                stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
                ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
                ArrayList<String> output = new ArrayList<String>();
                while (rs.next()) {
                    output.add("Read from DB: " + rs.getTimestamp("tick"));
                }
                attributes.put("results", output);
                return new ModelAndView(attributes, "db.ftl");
            } catch (Exception e) {
                attributes.put("message", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            } finally {
                if (connection != null) try { connection.close(); } catch (SQLException ignored) { }
            }
        }, new FreeMarkerEngine());
    }
}
|
import java.net.URI;
import java.net.URISyntaxException;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import com.heroku.sdk.jdbc.DatabaseUrl;

import spark.ModelAndView;
import spark.template.freemarker.FreeMarkerEngine;

import static spark.Spark.*;
import static spark.Spark.get;
/**
 * Spark web application: job-listing pages rendered with FreeMarker plus a demo
 * database page backed by a Heroku Postgres ticks table.
 */
public class Main {

    public static void main(String[] args) {
        port(Integer.valueOf(System.getenv("PORT")));
        staticFileLocation("/public");

        get("/hello", (req, res) -> "Hello World");

        // Job categories page (index1.ftl).
        get("/index1", (req, res) -> {
            Map<String, Object> attributes = new HashMap<>();
            ArrayList<String> technology = new ArrayList<String>();
            technology.add("DBA");
            technology.add("web developer");
            technology.add("network admin");
            technology.add("data analyst");
            ArrayList<String> healthworker = new ArrayList<String>();
            healthworker.add("CNA");
            healthworker.add("Register Nurse");
            healthworker.add("house keeping");
            healthworker.add("health assistant");
            ArrayList<String> admin = new ArrayList<String>();
            admin.add("HR manager");
            admin.add("accountant");
            admin.add("branch manager");
            admin.add("tech manager");
            // Fix: the original did not compile — SimpleDateFormat was unimported and
            // format() was called with no argument. "EEEE" formats the day-of-week name.
            SimpleDateFormat formatter = new SimpleDateFormat("EEEE");
            String jobAtIdeal = formatter.format(new Date());
            System.out.println(jobAtIdeal);
            System.out.println(admin);
            attributes.put("technology", technology);
            attributes.put("healthworker", healthworker);
            attributes.put("admin", admin);
            attributes.put("jobAtIdeal", jobAtIdeal);
            return new ModelAndView(attributes, "index1.ftl");
        }, new FreeMarkerEngine());

        get("/", (request, response) -> {
            Map<String, Object> attributes = new HashMap<>();
            attributes.put("message", "Hello World!");
            return new ModelAndView(attributes, "index.ftl");
        }, new FreeMarkerEngine());

        // Demo endpoint: inserts the current timestamp and renders every row of the ticks table.
        get("/db", (req, res) -> {
            Connection connection = null;
            Map<String, Object> attributes = new HashMap<>();
            try {
                connection = DatabaseUrl.extract().getConnection();
                Statement stmt = connection.createStatement();
                stmt.executeUpdate("CREATE TABLE IF NOT EXISTS ticks (tick timestamp)");
                stmt.executeUpdate("INSERT INTO ticks VALUES (now())");
                ResultSet rs = stmt.executeQuery("SELECT tick FROM ticks");
                ArrayList<String> output = new ArrayList<String>();
                while (rs.next()) {
                    output.add("Read from DB: " + rs.getTimestamp("tick"));
                }
                attributes.put("results", output);
                return new ModelAndView(attributes, "db.ftl");
            } catch (Exception e) {
                attributes.put("message", "There was an error: " + e);
                return new ModelAndView(attributes, "error.ftl");
            } finally {
                // Best-effort close; nothing useful to report if closing fails.
                if (connection != null) try { connection.close(); } catch (SQLException ignored) { }
            }
        }, new FreeMarkerEngine());
    }
}
|
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.TreeMap;
/**
 * A normalized dictionary word with its definitions and creation date.
 * Every constructed Word registers itself in a static, sorted registry keyed
 * by its normalized text, so instances can be looked up and enumerated.
 */
public class Word {

    // Registry of every Word created so far, sorted by normalized text.
    private static TreeMap<String, Word> registry = new TreeMap<String, Word>();

    private final String dictionaryWord;
    private final LocalDate createdAt;
    private ArrayList<Definition> definitions;

    /**
     * Normalizes raw user input (decodes '+' and percent-escapes to spaces,
     * trims, lower-cases) and registers this instance under the result.
     */
    public Word(String userInput) {
        String normalized = userInput.replaceAll("(%[A-Z0-9a-z]{1,2})|(\\+)", " ").trim().toLowerCase();
        this.dictionaryWord = normalized;
        this.createdAt = LocalDate.now();
        this.definitions = new ArrayList<Definition>();
        registry.put(normalized, this);
    }

    /** Looks up a previously created Word by its normalized text, or null if absent. */
    public static Word getWord(String word) {
        return registry.get(word);
    }

    /** Removes the Word registered under the given text, if any. */
    public static void removeWord(String word) {
        registry.remove(word);
    }

    /** The live registry of all Words, keyed by normalized text. */
    public static TreeMap<String, Word> getAll() {
        return registry;
    }

    /** Empties the registry. */
    public static void clearAll() {
        registry.clear();
    }

    public String getString() {
        return dictionaryWord;
    }

    public void addDefinition(Definition definition) {
        definitions.add(definition);
    }

    public void removeDefinition(Definition definition) {
        definitions.remove(definition);
    }

    public ArrayList<Definition> getAllDefinitions() {
        return definitions;
    }

    public LocalDate getCreatedAt() {
        return createdAt;
    }
}
|
package ed.lang.ruby;
import java.io.*;
import org.jruby.*;
import org.testng.annotations.Test;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import ed.appserver.JSFileLibrary;
import ed.js.*;
import ed.js.engine.Scope;
import ed.js.func.JSFunctionCalls0;
import ed.lang.ruby.RubyJxpSource;
/**
 * TestNG wrapper that executes Ruby test suites (run_all_tests.rb) inside a
 * Babble scope, failing the Java test if the Ruby run throws.
 */
public class RubyFileRunnerTest {
    /** Relative path (under a QA root) that holds the Rails test suites. */
    protected static final String QA_RAILS_TEST_DIR_RELATIVE = "modules/ruby/rails";
    @Test(groups = {"ruby", "ruby.testunit"})
    public void testRunRubyTests() {
        // Runs the Ruby test-unit suite that ships with the source tree.
        runTestsIn(new File(System.getenv("ED_HOME"), "src/test/ed/lang/ruby"));
    }
    /**
     * This test runs the tests in QA_RAILS_TEST_DIR_RELATIVE, but only if
     * that directory exists. We look in two places: /data/qa and
     * $ED_HOME/../qa.
     * <p>
     * These tests require one or more copies of Rails itself, which we'd like
     * to keep out of the base Babble code.
     */
    @Test(groups = {"ruby", "ruby.testunit", "ruby.activerecord"})
    public void testRunRailsTests() {
        File dir;
        if ((dir = new File("/data/qa", QA_RAILS_TEST_DIR_RELATIVE)).exists() ||
            (dir = new File(new File(System.getenv("ED_HOME"), "../qa"), QA_RAILS_TEST_DIR_RELATIVE)).exists())
            runTestsIn(dir);
        else
            assertTrue(true); // Rails checkout not present: report as a (vacuous) pass, not a skip.
    }
    /**
     * Executes rootDir/run_all_tests.rb in a fresh scope; any exception from the
     * Ruby side fails the test.
     */
    protected void runTestsIn(File rootDir) {
        String edHome = System.getenv("ED_HOME");
        File f = new File(rootDir, "run_all_tests.rb");
        Scope s = createScope(f.getParentFile());
        RubyJxpSource source = new RubyJxpSource(f, Ruby.newInstance());
        addRubyLoadPath(s, source, new File(edHome, "build").getPath()); // for xgen.rb and files it includes
        addRubyLoadPath(s, source, rootDir.getPath());
        try {
            source.getFunction().call(s, new Object[0]);
        }
        catch (Exception e) {
            e.printStackTrace();
            fail("while running file " + f.getPath() + ", exception was thrown: " + e);
        }
    }
    /**
     * Builds a child scope of the global scope with the file libraries and a
     * print() that accumulates output into the "jsout" scope variable.
     */
    protected Scope createScope(File localRootDir) {
        Scope s = Scope.newGlobal();
        s = new Scope("test", s); // child of global scope
        Shell.addNiceShellStuff(s);
        s.set("local", new JSFileLibrary(localRootDir, "local", s));
        s.set("rails_local", new JSFileLibrary(new File(localRootDir, "rails-test-app"), "local", s));
        s.set("core", CoreJS.get().getLibrary(null, null, s, false));
        s.set("jsout", ""); // initial value; used by tests; will be written over later
        JSFunction print = new JSFunctionCalls0() {
            public Object call(Scope s, Object[] args) {
                return s.put("jsout", s.get("jsout").toString() + args[0].toString() + "\n", false); // default print behavior adds newline
            }
        };
        print.setName("print");
        s.set("print", print);
        return s;
    }
    /**
     * Appends path to the JRuby runtime's load path unless it is already there.
     * Backslashes are normalized so Windows paths work inside Ruby.
     */
    protected void addRubyLoadPath(Scope s, RubyJxpSource source, String path) {
        Ruby runtime = source.getRuntime(s);
        RubyString rpath = RubyString.newString(runtime, path.replace('\\', '/'));
        RubyArray loadPath = (RubyArray)runtime.getLoadService().getLoadPath();
        if (loadPath.include_p(runtime.getCurrentContext(), rpath).isFalse())
            loadPath.append(rpath);
    }
}
|
package com.cronutils;
import com.cronutils.model.Cron;
import com.cronutils.model.definition.CronDefinition;
import com.cronutils.model.definition.CronDefinitionBuilder;
import com.cronutils.model.time.ExecutionTime;
import com.cronutils.parser.CronParser;
import org.junit.*;
import org.threeten.bp.ZoneId;
import org.threeten.bp.ZonedDateTime;
import static com.cronutils.model.CronType.QUARTZ;
import static org.junit.Assert.*;
/**
 * Regression tests for cron-utils issue #200: the nanosecond component of a
 * ZonedDateTime must not affect whether a Quartz cron expression matches.
 */
public class Issue200Test {

    // Does not pass!
    //@Test
    public void testMustMatchCronEvenIfNanoSecondsVaries() {
        CronDefinition cronDefinition =
                CronDefinitionBuilder.instanceDefinitionFor(QUARTZ);
        CronParser parser = new CronParser(cronDefinition);
        Cron quartzCron = parser.parse("00 00 10 * * ?");
        quartzCron.validate();
        // NOTE: off by 3 nanoseconds.
        // Leading-zero literals removed: a leading 0 makes a Java int literal
        // octal (e.g. 08/09 would not even compile), so plain decimals are used.
        ZonedDateTime zdt = ZonedDateTime.of(1999, 7, 18, 10, 0, 0, 3, ZoneId.systemDefault());
        // Must be true
        assertTrue("Nano seconds must not affect matching of Cron Expressions",
                ExecutionTime.forCron(quartzCron).isMatch(zdt));
    }

    // Nanosecond-perfect match (passes, no surprises here)
    @Test
    public void testMatchExact() {
        CronDefinition cronDefinition =
                CronDefinitionBuilder.instanceDefinitionFor(QUARTZ);
        CronParser parser = new CronParser(cronDefinition);
        Cron quartzCron = parser.parse("00 00 10 * * ?");
        quartzCron.validate();
        ZonedDateTime zdt = ZonedDateTime.of(1999, 7, 18, 10, 0, 0, 0, ZoneId.systemDefault());
        assertTrue("Nano seconds must not affect matching of Cron Expressions",
                ExecutionTime.forCron(quartzCron).isMatch(zdt));
    }
}
|
package com.hankcs.demo;
import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.dictionary.py.Pinyin;

import java.util.List;
import java.util.function.Function;
/**
 * Pinyin conversion demo: prints the input text followed by one CSV row per
 * pinyin attribute (full pinyin, tone-marked, tone-less, tone number, shengmu,
 * yunmu, head), then demonstrates convertToPinyinString.
 *
 * @author hankcs
 */
public class DemoPinyin
{
    public static void main(String[] args)
    {
        String text = "";
        List<Pinyin> pinyinList = HanLP.convertToPinyinList(text);
        // header row: the original characters, one per column
        System.out.print(",");
        for (char c : text.toCharArray())
        {
            System.out.printf("%c,", c);
        }
        System.out.println();
        // one row per pinyin attribute; the repeated print loops of the
        // original are factored into printRow()
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p; }
        });
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p.getPinyinWithToneMark(); }
        });
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p.getPinyinWithoutTone(); }
        });
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p.getTone(); }
        });
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p.getShengmu(); }
        });
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p.getYunmu(); }
        });
        printRow(pinyinList, new Function<Pinyin, Object>() {
            public Object apply(Pinyin p) { return p.getHead(); }
        });
        System.out.println(HanLP.convertToPinyinString("2012", " ", true));
        System.out.println(HanLP.convertToPinyinString("2012", " ", false));
    }

    /**
     * Prints one CSV row: a leading comma, then the given attribute of each
     * pinyin followed by a comma, then a newline — byte-identical to the
     * original inline loops ("%s," per value).
     */
    private static void printRow(List<Pinyin> pinyinList, Function<Pinyin, Object> attribute)
    {
        System.out.print(",");
        for (Pinyin pinyin : pinyinList)
        {
            System.out.printf("%s,", attribute.apply(pinyin));
        }
        System.out.println();
    }
}
|
package datamodel;
import com.google.common.collect.Range;
import io.github.mzmine.datamodel.Frame;
import io.github.mzmine.datamodel.IMSRawDataFile;
import io.github.mzmine.datamodel.ImsMsMsInfo;
import io.github.mzmine.datamodel.MassSpectrumType;
import io.github.mzmine.datamodel.MergedMsMsSpectrum;
import io.github.mzmine.datamodel.MobilityType;
import io.github.mzmine.datamodel.PolarityType;
import io.github.mzmine.datamodel.features.ModularFeature;
import io.github.mzmine.datamodel.features.ModularFeatureList;
import io.github.mzmine.datamodel.features.ModularFeatureListRow;
import io.github.mzmine.datamodel.features.types.ImsMsMsInfoType;
import io.github.mzmine.datamodel.features.types.numbers.BestFragmentScanNumberType;
import io.github.mzmine.datamodel.features.types.numbers.BestScanNumberType;
import io.github.mzmine.datamodel.features.types.numbers.FragmentScanNumbersType;
import io.github.mzmine.datamodel.impl.BuildingMobilityScan;
import io.github.mzmine.datamodel.impl.ImsMsMsInfoImpl;
import io.github.mzmine.datamodel.impl.SimpleFrame;
import io.github.mzmine.datamodel.impl.masslist.ScanPointerMassList;
import io.github.mzmine.main.MZmineCore;
import io.github.mzmine.parameters.parametertypes.tolerances.MZTolerance;
import io.github.mzmine.project.impl.IMSRawDataFileImpl;
import io.github.mzmine.util.RangeUtils;
import io.github.mzmine.util.scans.SpectraMerging;
import io.github.mzmine.util.scans.SpectraMerging.MergingType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import javafx.scene.paint.Color;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestInstance.Lifecycle;
/**
 * Save/load round-trip tests for feature-table data types that reference
 * ion-mobility (IMS) scans: best scan, best fragment scan, fragment scan
 * lists, and IMS MS/MS info. Fixtures are built once per class.
 */
@TestInstance(Lifecycle.PER_CLASS)
public class IMSScanTypesTest {
// Shared fixtures (Lifecycle.PER_CLASS): one raw file, one feature list with a single row/feature.
IMSRawDataFile file;
ModularFeatureList flist;
ModularFeatureListRow row;
ModularFeature feature;
/**
 * Builds an in-memory IMS raw data file with 5 MS1 frames (frames 0-4, empty
 * spectra) and 5 MS2 frames (frames 5-9, two data points per mobility scan),
 * plus the feature list / row / feature used by the tests.
 */
@BeforeAll
void initialise() {
// boot MZmine with the given command-line flags before creating data files
MZmineCore.main(new String[]{"-r", "-m", "all"});
try {
file = new IMSRawDataFileImpl("testfile", null, null, Color.BLACK);
} catch (IOException e) {
e.printStackTrace();
Assertions.fail("Cannot initialise data file.");
}
Assertions.assertNotNull(file);
flist = new ModularFeatureList("flist", null, file);
row = new ModularFeatureListRow(flist, 1);
feature = new ModularFeature(flist, file, null, null);
row.addFeature(file, feature);
flist.addRow(row);
// generate ms1 frames
for (int i = 0; i < 5; i++) {
List<BuildingMobilityScan> scans = new ArrayList<>();
for (int j = 0; j < 5; j++) {
// 5 empty mobility scans per MS1 frame
scans.add(new BuildingMobilityScan(j, new double[0], new double[0]));
}
SimpleFrame frame = new SimpleFrame(file, i, 1, 0.1f * i, 0, 0, new double[0], new double[0],
MassSpectrumType.CENTROIDED, PolarityType.POSITIVE, "", Range.closed(0d, 1d),
MobilityType.TIMS, null);
frame.setMobilities(new double[]{5d, 4d, 3d, 2d, 1d});
frame.setMobilityScans(scans);
try {
file.addScan(frame);
} catch (IOException e) {
Assertions.fail();
}
}
// generate ms2 frames
for (int i = 5; i < 10; i++) {
List<BuildingMobilityScan> scans = new ArrayList<>();
for (int j = 0; j < 5; j++) {
// two data points (m/z 500/600) so merging produces non-empty spectra
scans.add(new BuildingMobilityScan(j, new double[]{500, 600}, new double[]{500, 600}));
}
SimpleFrame frame = new SimpleFrame(file, i, 2, 0.1f * i, 0, 0, new double[0], new double[0],
MassSpectrumType.CENTROIDED, PolarityType.POSITIVE, "", Range.closed(0d, 1d),
MobilityType.TIMS, null);
frame.setMobilities(new double[]{5d, 4d, 3d, 2d, 1d});
frame.setMobilityScans(scans);
// set mass lists for merging
frame.getMobilityScans().stream().forEach(ms -> ms.addMassList(new ScanPointerMassList(ms)));
try {
file.addScan(frame);
} catch (IOException e) {
Assertions.fail();
}
}
// NOTE(review): subList(0, 4) selects frames 0..3 only (end index is exclusive) — confirm intended.
flist.setSelectedScans(file, file.getFrames().subList(0, 4));
}
/** Round-trips a BestScanNumberType value (a plain Frame) at row and feature level. */
@Test
void bestScanNumberTypeTest() {
BestScanNumberType type = new BestScanNumberType();
Frame value = file.getFrame(3);
DataTypeTestUtils.testSaveLoad(type, value, flist, row, null, null);
DataTypeTestUtils.testSaveLoad(type, value, flist, row, feature, file);
}
/** Round-trips a merged MS/MS spectrum and compares it field by field (no equals() on the type). */
@Test
void bestFragmentScanNumberTypeTest() {
BestFragmentScanNumberType type = new BestFragmentScanNumberType();
ImsMsMsInfo info = new ImsMsMsInfoImpl(300d, Range.closed(1, 3), 30f, 1, file.getFrame(4),
file.getFrame(6));
MergedMsMsSpectrum value = SpectraMerging
.getMergedMsMsSpectrumForPASEF(info, new MZTolerance(0.01, 10), MergingType.SUMMED, null,
RangeUtils.toFloatRange(file.getFrame(5).getMobilityRange()), null);
MergedMsMsSpectrum loaded = (MergedMsMsSpectrum) DataTypeTestUtils
.saveAndLoad(type, value, flist, row, null, null);
compareMergedMsMs(value, loaded);
loaded = (MergedMsMsSpectrum) DataTypeTestUtils
.saveAndLoad(type, value, flist, row, feature, file);
compareMergedMsMs(value, loaded);
}
/** Round-trips a list of merged MS/MS spectra (one per MS2 frame) and compares element-wise. */
@Test
void fragmentScanNumbersTypeTest() {
FragmentScanNumbersType type = new FragmentScanNumbersType();
List<MergedMsMsSpectrum> value = new ArrayList<>();
for (int i = 5; i < 10; i++) {
ImsMsMsInfo info = new ImsMsMsInfoImpl(300d, Range.closed(1, 3), 30f, 1, file.getFrame(i - 5),
file.getFrame(i));
MergedMsMsSpectrum scan = SpectraMerging
.getMergedMsMsSpectrumForPASEF(info, new MZTolerance(0.01, 10), MergingType.SUMMED, null,
RangeUtils.toFloatRange(file.getFrame(i).getMobilityRange()), null);
value.add(scan);
}
List<MergedMsMsSpectrum> loaded = (List<MergedMsMsSpectrum>) DataTypeTestUtils
.saveAndLoad(type, value, flist, row, null, null);
for (int i = 0; i < value.size(); i++) {
compareMergedMsMs(value.get(i), loaded.get(i));
}
loaded = (List<MergedMsMsSpectrum>) DataTypeTestUtils
.saveAndLoad(type, value, flist, row, feature, file);
for (int i = 0; i < value.size(); i++) {
compareMergedMsMs(value.get(i), loaded.get(i));
}
}
/** Round-trips a list of ImsMsMsInfo objects at feature level. */
@Test
void testImsMsMsInfoType() {
ImsMsMsInfoType type = new ImsMsMsInfoType();
List<ImsMsMsInfo> list = new ArrayList<>();
for (int i = 5; i < 10; i++) {
ImsMsMsInfo info = new ImsMsMsInfoImpl(300d, Range.closed(1, 3), 30f, 1, file.getFrame(i - 5),
file.getFrame(i));
list.add(info);
}
DataTypeTestUtils.testSaveLoad(type, list, flist, row, feature, file);
}
/**
 * Asserts field-by-field equality of two merged MS/MS spectra, including every
 * data point (intensity and m/z).
 */
private static void compareMergedMsMs(MergedMsMsSpectrum value, MergedMsMsSpectrum loaded) {
Assertions.assertEquals(value.getCollisionEnergy(), loaded.getCollisionEnergy());
Assertions.assertEquals(value.getBasePeakIndex(), loaded.getBasePeakIndex());
Assertions.assertEquals(value.getBasePeakMz(), loaded.getBasePeakMz());
Assertions.assertEquals(value.getCenterFunction(), loaded.getCenterFunction());
Assertions.assertEquals(value.getBasePeakIntensity(), loaded.getBasePeakIntensity());
Assertions.assertEquals(value.getDataFile(), loaded.getDataFile());
Assertions.assertEquals(value.getDataPointMZRange(), loaded.getDataPointMZRange());
Assertions.assertEquals(value.getScanningMZRange(), loaded.getScanningMZRange());
Assertions.assertEquals(value.getPolarity(), loaded.getPolarity());
Assertions.assertEquals(value.getNumberOfDataPoints(), loaded.getNumberOfDataPoints());
Assertions.assertEquals(value.getScanNumber(), loaded.getScanNumber());
Assertions.assertEquals(value.getPrecursorCharge(), loaded.getPrecursorCharge());
Assertions.assertEquals(value.getPrecursorMZ(), loaded.getPrecursorMZ());
Assertions.assertEquals(value.getRetentionTime(), loaded.getRetentionTime());
Assertions.assertEquals(value.getScanDefinition(), loaded.getScanDefinition());
Assertions.assertEquals(value.getSourceSpectra(), loaded.getSourceSpectra());
Assertions.assertEquals(value.getTIC(), loaded.getTIC());
Assertions.assertEquals(value.getMSLevel(), loaded.getMSLevel());
Assertions.assertEquals(value.getMergingType(), loaded.getMergingType());
for (int i = 0; i < value.getNumberOfDataPoints(); i++) {
Assertions.assertEquals(value.getIntensityValue(i), loaded.getIntensityValue(i));
Assertions.assertEquals(value.getMzValue(i), loaded.getMzValue(i));
}
}
}
|
package org.bdb;
import static org.junit.Assert.*;
import org.dml.storage.berkeleydb.exceptions.*;
import org.dml.storage.berkeleydb.generics.*;
import org.junit.*;
import org.q.*;
/**
 * Tests Named_UniqueNumberGenerator against a BerkeleyDB-backed storage:
 * initial value, two generator instances sharing one underlying sequence,
 * sequence overflow at max, and exclusive-open semantics of the store dir.
 */
public class TestBDBEnvironment
{
@Test
public void testUNG() {
StorageBDBGeneric env = null;
try {
// TODO confirm: the boolean argument presumably means "create/wipe" — verify against the factory docs
env = GlobalBDB.factory.getNewStorage( JUnitConstants.BDB_ENVIRONMENT_STORE_DIR, true );
final String sameName = "some name";
final int delta = +1;
final long max = 10;
final long min = -201;
final long initialValue = -6;
final Named_UniqueNumberGenerator ung =
new Named_UniqueNumberGenerator( env, sameName, min, initialValue, max, false );
// first value returned is the initial value itself
long l1 = ung.getNextUniqueLong( delta );
assertTrue( initialValue == l1 );
final Named_UniqueNumberGenerator ung2 =
new Named_UniqueNumberGenerator( env, sameName, min, initialValue, max, false );
final long l2 = ung2.getNextUniqueLong( delta );
// System.out.println( ung + " / " + ung2 );
assertTrue( ung2 != ung );// they are same internally in BDB though.
// assertTrue( ung2.equals( ung ) );
try {
ung2.equals( ung );// not implemented
fail( "should've thrown" );// FIXME: i forgot why this is bad; and throw null; is better
} catch ( final BadCallError bce ) {
// this is right
}
// ung2 shares the same underlying sequence, so it continues where ung left off
final long expected = ( initialValue + ( 1 * delta ) );
assertTrue( "got:" + l2 + " instead of " + expected, expected == l2 );
assertTrue( l1 != l2 );
assertTrue( ( l1 + delta ) == l2 );
l1 = ung.getNextUniqueLong( delta );
assertTrue( l1 == ( initialValue + ( 2 * delta ) ) );
l1 = ung.getNextUniqueLong( delta );
assertTrue( l1 == ( initialValue + ( 3 * delta ) ) );
// drain the remaining values up to max, checking each in turn
for ( int i = 0; i < ( ( max - 3 ) - initialValue ); i++ ) {
l1 = ung.getNextUniqueLong( delta );
// System.out.println( "l1==" + l1 );
assertTrue( l1 == ( ( ( 4 + i ) * delta ) + initialValue ) );
}
// the sequence is exhausted; the next request must overflow
try {
l1 = ung.getNextUniqueLong( delta );
Q.fail();
} catch ( final Throwable soe ) {
// right
if ( !Q.isBareException( soe, SequenceOverflow_BDBException.class ) ) {
Q.rethrow( soe );
}
}
} finally {
if ( null != env ) {
env.shutdown();
}
}
// opening the same store dir a second time (without the flag) must fail with BadCallError
StorageBDBGeneric env3 = null;
StorageBDBGeneric env5 = null;
try {
env3 = GlobalBDB.factory.getNewStorage( JUnitConstants.BDB_ENVIRONMENT_STORE_DIR, false );
try {
env5 = GlobalBDB.factory.getNewStorage( JUnitConstants.BDB_ENVIRONMENT_STORE_DIR, false );
Q.fail();
} catch ( final Throwable t ) {
if ( Q.isBareException( t, BadCallError.class ) ) {
} else {
Q.rethrow( t );
}
}
} finally {
if ( null != env3 ) {
env3.shutdown( true );
}
}
// env5 must never have been assigned — the second open threw
assertTrue( null == env5 );
}
}
|
package VASSAL.command;
import java.awt.Point;
import VASSAL.build.GameModule;
import VASSAL.build.module.GlobalOptions;
import VASSAL.build.module.Map;
import VASSAL.build.module.map.HighlightLastMoved;
import VASSAL.counters.BoundsTracker;
import VASSAL.counters.Deck;
import VASSAL.counters.DeckVisitor;
import VASSAL.counters.DeckVisitorDispatcher;
import VASSAL.counters.GamePiece;
import VASSAL.counters.PieceVisitorDispatcher;
import VASSAL.counters.Properties;
import VASSAL.counters.Stack;
/**
 * Command that moves a piece to a new location and position within a stack.
 * While this can be accomplished with a {@link ChangePiece} command, this
 * command is safer in terms of recovering from changes to the game state that may have occurred
 * since the command was created. For instance, A {@link ChangePiece} command that adds
 * a piece to a {@link VASSAL.counters.Stack} will cause the piece to disappear if the
 * stack has been deleted. This Command will recover more gracefully.
 */
public class MovePiece extends Command {
private final String id;
private final String newMapId;
private final String oldMapId;
private final Point newPosition;
private final Point oldPosition;
private final String newUnderneathId;
private final String oldUnderneathId;
private final String playerId;
/**
 *
 * @param id The id of the piece being moved
 * @param newMapId The id of the map being moved to
 * @param newPosition the new position
 * @param newUnderneathId The id of the piece which will be immediately beneath this piece in any containing Stack. May be null
 * @param oldMapId The id of the map being moved from
 * @param oldPosition the old position
 * @param oldUnderneathId The id of the piece which was immediately beneath this piece in its original containing Stack.
 * @param playerId the id of the player making this move
 */
public MovePiece(String id, String newMapId, Point newPosition, String newUnderneathId, String oldMapId, Point oldPosition, String oldUnderneathId, String playerId) {
this.id = id;
this.newMapId = newMapId;
this.oldMapId = oldMapId;
this.newPosition = newPosition;
this.oldPosition = oldPosition;
this.newUnderneathId = newUnderneathId;
this.oldUnderneathId = oldUnderneathId;
this.playerId = playerId;
}
// Simple accessors for the recorded move parameters.
public String getId() {
return id;
}
public String getNewMapId() {
return newMapId;
}
public String getOldMapId() {
return oldMapId;
}
public Point getNewPosition() {
return newPosition;
}
public Point getOldPosition() {
return oldPosition;
}
public String getNewUnderneathId() {
return newUnderneathId;
}
public String getOldUnderneathId() {
return oldUnderneathId;
}
public String getPlayerId() {
return playerId;
}
/**
 * Executes the move: resolves the piece by id, merges it with the recorded
 * "underneath" piece, a deck/stack/piece found at the destination, or places
 * it alone; then repaints and optionally centers the view. Silently does
 * nothing if the piece no longer exists, and recovers if other referenced
 * pieces/maps have changed since the command was created.
 */
@Override
protected void executeCommand() {
final GamePiece piece = GameModule.getGameModule().getGameState().getPieceForId(id);
if (piece != null) {
final BoundsTracker bounds = new BoundsTracker();
bounds.addPiece(piece);
final Map newMap = Map.getMapById(newMapId);
if (newMap != null) {
final PieceVisitorDispatcher mergeFinder = createMergeFinder(newMap, piece, newPosition);
if (newUnderneathId != null) {
// prefer merging onto the recorded underneath piece, if it is still valid
final GamePiece under = GameModule.getGameModule().getGameState().getPieceForId(newUnderneathId);
if (under != null
&& under.getPosition().equals(newPosition)
&& under.getMap() == newMap) { //BR// lest someone have simultaneously moved or deleted the piece.
newMap.getStackMetrics().merge(under, piece);
}
else {
// underneath piece gone: fall back to merging with whatever is at the destination
if (newMap.apply(mergeFinder) == null) {
newMap.placeAt(piece, newPosition);
}
}
}
else {
if (newMap.apply(mergeFinder) == null) {
// nothing to merge with: place alone, wrapped in a new Stack when stacking applies
if (newMap.getStackMetrics().isStackingEnabled()
&& !Boolean.TRUE.equals(piece.getProperty(Properties.NO_STACK))) {
final Stack s = new Stack();
s.add(piece);
GameModule.getGameModule().getGameState().addPiece(s);
newMap.placeAt(s, newPosition);
}
else {
newMap.placeAt(piece, newPosition);
}
}
// no underneath recorded: put the piece at the bottom of its stack
if (piece.getParent() != null) {
piece.getParent().insert(piece, 0);
}
}
}
else {
// destination map no longer exists: just remove the piece from its old map
final Map oldMap = Map.getMapById(oldMapId);
if (oldMap != null) {
oldMap.removePiece(piece);
}
}
bounds.addPiece(piece);
// Highlight the stack the piece was moved to
HighlightLastMoved.setLastMoved(piece);
bounds.repaint();
// optionally scroll the map so the opponent's move is visible
if (piece.getMap() != null
&& GlobalOptions.getInstance().centerOnOpponentsMove()
&& !Boolean.TRUE.equals(piece.getProperty(Properties.INVISIBLE_TO_ME))) {
piece.getMap().ensureVisible(piece.getMap().selectionBoundsOf(piece));
}
}
}
/** The undo is another MovePiece with the old/new parameters swapped. */
@Override
protected Command myUndoCommand() {
return new MovePiece(id, oldMapId, oldPosition, oldUnderneathId, newMapId, newPosition, newUnderneathId, playerId);
}
/**
 * Creates a new {@link PieceVisitorDispatcher} that will create a {@link Command} object
 * to merge the target piece with any applicable pieces at the target location
 * @param map the destination map
 * @param p the piece being moved
 * @param pt the destination point
 * @return a dispatcher whose visit methods return a non-null merge result when a suitable
 *         merge target exists at {@code pt}, or null when no merge applies
 */
protected PieceVisitorDispatcher createMergeFinder(final Map map, final GamePiece p, final Point pt) {
return new DeckVisitorDispatcher(new DeckVisitor() {
@Override
public Object visitDeck(Deck d) {
// decks at the destination always absorb the piece
if (d.getPosition().equals(pt)) {
return map.getStackMetrics().merge(d, p);
}
else {
return null;
}
}
@Override
public Object visitStack(Stack s) {
// merge into a stack only when stacking applies and the stack accepts the piece
if (s.getPosition().equals(pt)
&& map.getStackMetrics().isStackingEnabled()
&& !Boolean.TRUE.equals(p.getProperty(Properties.NO_STACK))
&& s.topPiece(playerId) != null //NOTE: topPiece() returns the top VISIBLE piece (not hidden by Invisible trait)
&& map.getPieceCollection().canMerge(p, s)) {
return map.getStackMetrics().merge(s, p);
}
else {
return null;
}
}
@Override
public Object visitDefault(GamePiece piece) {
// merge with a lone piece, unless it is hidden from the moving player
if (piece.getPosition().equals(pt)
&& map.getStackMetrics().isStackingEnabled()
&& !Boolean.TRUE.equals(p.getProperty(Properties.NO_STACK))
&& !Boolean.TRUE.equals(piece.getProperty(Properties.NO_STACK))
&& map.getPieceCollection().canMerge(p, piece)) {
final String hiddenBy = (String) piece.getProperty(Properties.HIDDEN_BY);
if (hiddenBy == null
|| hiddenBy.equals(playerId)) {
return map.getStackMetrics().merge(piece, p);
}
else {
return null;
}
}
else {
return null;
}
}
});
}
/** One-line debug summary of the move (marked NON-NLS: not user-facing). */
@Override
public String getDetails() {
return "id=" + id + ",map=" + newMapId + ",position=" + newPosition + ",under=" + newUnderneathId; //NON-NLS
}
}
|
package org.jdbdt;
import static org.jdbdt.JDBDT.*;
import static org.jdbdt.TestUtil.*;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map.Entry;
import java.util.function.BiPredicate;
import java.util.function.Function;
import static org.junit.Assert.*;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
/**
 * Tests for the JDBDT query-builder DSL: plain selects, WHERE clauses (with
 * and without arguments), column projections, DISTINCT, ORDER BY, GROUP BY /
 * HAVING, and multi-source joins. Each test builds a query, executes it, and
 * checks the result against expectations derived from INITIAL_DATA.
 */
@SuppressWarnings("javadoc")
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class QueryBuilderTest extends DBTestCase {
// SELECT * with no clauses returns exactly the initial data set
@Test
public void testExecPlain() {
DataSource ds =
select(getDB(), UserDAO.COLUMNS)
.from(UserDAO.TABLE_NAME)
.build();
DataSet actual = executeQuery(ds);
DataSet expected =
data(ds, getConversion())
.rows(INITIAL_DATA);
assertDataSet(expected, actual);
}
// WHERE with an inlined literal
@Test
public void testExecWhere() throws SQLException {
User u = getDAO().query(EXISTING_DATA_ID1);
DataSource ds =
select(getDB(), UserDAO.COLUMNS)
.from(UserDAO.TABLE_NAME)
.where("login='" + EXISTING_DATA_ID1 + "'")
.build();
DataSet actual = executeQuery(ds);
DataSet expected = data(ds, getConversion()).row(u);
assertDataSet(expected, actual);
}
// WHERE with a positional ? argument bound at build() time
@Test
public void testExecWhereWithArgs() throws SQLException {
User u = getDAO().query(EXISTING_DATA_ID1);
DataSource ds =
select(getDB(), UserDAO.COLUMNS)
.from(UserDAO.TABLE_NAME)
.where("login=?")
.build(EXISTING_DATA_ID1);
DataSet actual =
executeQuery(ds);
DataSet expected =
data(ds, getConversion())
.row(u);
assertDataSet(expected, actual);
}
// projection of a single column
@Test
public void testExecColumns1() throws SQLException {
User u = getDAO().query(EXISTING_DATA_ID1);
DataSource q =
select(getDB(), "password")
.from(UserDAO.TABLE_NAME)
.where("login=?")
.build(EXISTING_DATA_ID1);
DataSet actual = executeQuery(q);
DataSet expected =
data(q)
.row(u.getPassword());
assertDataSet(expected, actual);
}
// projection of two columns
@Test
public void testExecColumns2() throws SQLException {
User u = getDAO().query(EXISTING_DATA_ID1);
DataSource q =
select(getDB(), "password", "name")
.from(UserDAO.TABLE_NAME)
.where("login=?")
.build(EXISTING_DATA_ID1);
DataSet actual =
executeQuery(q);
DataSet expected =
data(q)
.row(u.getPassword(), u.getName());
assertDataSet(expected, actual);
}
// DISTINCT over all columns is a no-op for unique rows
@Test
public void testExecWithDistinct1() {
DataSource q =
select(getDB(), UserDAO.COLUMNS)
.distinct()
.from(UserDAO.TABLE_NAME)
.build();
DataSet actual = executeQuery(q);
DataSet expected =
data(q, getConversion())
.rows(INITIAL_DATA);
assertDataSet(expected, actual);
}
// DISTINCT over one column: expect each password exactly once
@Test
public void testExecWithDistinct2() {
DataSource q =
select(getDB(), "password")
.distinct()
.from(UserDAO.TABLE_NAME)
.build();
DataSet actual =
executeQuery(q);
HashSet<String> distinctPass = new HashSet<>();
DataSet expected = data(q);
for (User u : INITIAL_DATA) {
if (distinctPass.add(u.getPassword())) {
expected.row(u.getPassword());
}
}
assertDataSet(expected, actual);
}
// ORDER BY one column; sameDataAs checks order as well as content
@Test
public void testExecWithOrderBy1() {
DataSource q =
select(getDB(), UserDAO.COLUMNS)
.from(UserDAO.TABLE_NAME)
.orderBy("login")
.build();
DataSet actual = executeQuery(q);
User[] sortedUsers = INITIAL_DATA.clone();
Arrays.sort(sortedUsers,
(a,b) -> a.getLogin().compareTo(b.getLogin()));
DataSet expected = data(q, getConversion()).rows(sortedUsers);
assertTrue(expected.sameDataAs(actual));
}
// ORDER BY two columns (password, then login)
@Test
public void testExecWithOrderBy2() {
DataSource q =
select(getDB(), UserDAO.COLUMNS)
.from(UserDAO.TABLE_NAME)
.orderBy("password", "login")
.build();
DataSet actual = executeQuery(q);
User[] sortedUsers = INITIAL_DATA.clone();
Arrays.sort(sortedUsers,
(a,b) -> {
int cmp = a.getPassword().compareTo(b.getPassword());
if (cmp == 0) {
cmp = a.getLogin().compareTo(b.getLogin());
}
return cmp;
});
DataSet expected =
data(q, getConversion())
.rows(sortedUsers);
assertTrue(expected.sameDataAs(actual));
}
/**
 * Builds the expected (password, count) data set by counting INITIAL_DATA
 * passwords with the supplied zero value and increment function, keeping only
 * entries accepted by pred. Generic in the number type because COUNT(*) is
 * returned as Integer by some databases and Long by others.
 */
<T extends Number> DataSet passCount(DataSource ds, T zero, Function<T,T> incr, BiPredicate<String,T> pred) {
DataSet expected = data(ds);
HashMap<String,T> count = new HashMap<>();
for (User u : INITIAL_DATA) {
count.put(u.getPassword(),
incr.apply(count.getOrDefault(u.getPassword(),zero)));
}
for (Entry<String,T> e : count.entrySet()) {
if (pred.test(e.getKey(), e.getValue())) {
expected.row(e.getKey(), e.getValue());
}
}
return expected;
}
// GROUP BY password with COUNT(*)
@Test
public void testExecWithGroupBy1() {
DataSource q = select(getDB(), "password","count(*)")
.from(UserDAO.TABLE_NAME)
.groupBy("password")
.build();
DataSet expected =
DBCfg.getConfig().doesCountReturnAnInteger() ?
passCount(q, 0, x -> x + 1, (p,n) -> true)
:
passCount(q, 0L, x -> x + 1L, (p,n) -> true);
DataSet actual = executeQuery(q);
assertDataSet(expected, actual);
}
// GROUP BY with HAVING count(*) > 1
@Test
public void testExecWithGroupBy2() {
DataSource q = select(getDB(), "password","count(*)")
.from(UserDAO.TABLE_NAME)
.groupBy("password")
.having("count(*) > 1")
.build();
DataSet expected =
DBCfg.getConfig().doesCountReturnAnInteger() ?
passCount(q, 0, x -> x + 1, (p,n) -> n > 1)
:
passCount(q, 0L, x -> x + 1L, (p,n) -> n > 1);
DataSet actual = executeQuery(q);
assertDataSet(expected, actual);
}
// self-join: pairs of distinct logins sharing a password (both orderings expected)
@Test
public void testExecWithMultipleSources() {
DataSource q =
select(getDB(), "u1.LOGIN", "u2.LOGIN")
.from(UserDAO.TABLE_NAME + " u1", UserDAO.TABLE_NAME + " u2" )
.where("u1.login <> u2.login AND u1.PASSWORD = u2.PASSWORD")
.build();
DataSet expected = data(q);
// NOTE(review): enables query logging globally on the shared DB handle — consider scoping or disabling after the test
getDB().enable(DB.Option.LOG_QUERIES);
for (int i=0; i < INITIAL_DATA.length; i++) {
User a = INITIAL_DATA[i];
for (int j=i+1; j < INITIAL_DATA.length; j++) {
User b = INITIAL_DATA[j];
if (a.getPassword().equals(b.getPassword())) {
expected.row(a.getLogin(), b.getLogin())
.row(b.getLogin(), a.getLogin());
}
}
}
DataSet actual = executeQuery(q);
assertDataSet(expected, actual);
}
}
|
package com.ForgeEssentials.core.misc;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.HashMap;
import java.util.Set;
import java.util.TreeSet;
import net.minecraft.item.Item;
import net.minecraft.item.ItemBlock;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import com.ForgeEssentials.util.FunctionHelper;
import com.google.common.base.Strings;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import cpw.mods.fml.common.registry.GameData;
import cpw.mods.fml.common.registry.ItemData;
public abstract class UnfreindlyItemList
{
private static final HashBiMap<String, Integer> map = HashBiMap.create();
private static final String VANILLA = "vanilla";
private static final String UNKNOWN = "unknownSource";
private UnfreindlyItemList()
{
}
/**
* should be called at PostLoad.
*/
public static void modStep()
{
HashMap<Integer, String> gameMap = new HashMap<Integer, String>();
map.clear();
// populate from GameData
{
NBTTagList list = new NBTTagList();
GameData.writeItemData(list);
ItemData data;
String modid;
for (int i = 0; i < list.tagCount(); i++)
{
data = new ItemData((NBTTagCompound) list.tagAt(i));
modid = VANILLA;
if (!data.getModId().equalsIgnoreCase("Minecraft"))
{
modid = data.getModId();
}
gameMap.put(data.getItemId(), modid);
}
}
// now iterrate through ItemList.
HashMap<String, Integer> duplicates = new HashMap<String, Integer>();
String name;
Integer num;
String tempName;
for (int i = 0; i < Item.itemsList.length; i++)
{
Item item = Item.itemsList[i];
if (item == null)
{
continue;
}
// get the name..
name = item.getItemName();
if (name == null)
{
if (item instanceof ItemBlock)
name = "block.";
else
name = "item.";
name = name + item.getClass().getSimpleName();
}
// split items and blocks
name = name.replace("tile.", "block.");
// get source.
tempName = gameMap.get(item.itemID);
if (Strings.isNullOrEmpty(tempName))
{
name = UNKNOWN+"." + name;
}
else
{
name = tempName + "." + name;
}
// add numbers to the end of duplicates
num = duplicates.get(name);
if (num == null)
{
duplicates.put(name, 0);
}
else
{
num++;
duplicates.put(name, num);
name += num;
}
name = name.replace(' ', '_');
// save
map.put(name, item.itemID);
}
}
/**
* @param name name of the block.
* @return -1 if the name does not exist.
*/
public static int getId(String name)
{
Integer id = map.get(name);
return id == null ? -1 : id;
}
/**
* @Param ID
* @return null if the ID does not exist
*/
public static String getName(int id)
{
String name = map.inverse().get(id);
if (Strings.isNullOrEmpty(name))
return UNKNOWN+"."+id;
else
return name;
}
public static Set<String> getNameSet()
{
return map.keySet();
}
public static void output(File output)
{
try
{
output.createNewFile();
BufferedWriter writer = new BufferedWriter(new FileWriter(output));
writer.write("
writer.newLine();
writer.write("
writer.newLine();
writer.write("
writer.newLine();
writer.newLine();
TreeSet<Integer> ids = new TreeSet<Integer>();
BiMap<Integer, String> inverse = map.inverse();
// order ids.
for (Integer id : map.inverse().keySet())
{
ids.add(id);
}
String str;
for (Integer id : ids)
{
str = String.format("%-7s", id);
str = str + " == " + inverse.get(id);
writer.write(str);
writer.newLine();
}
writer.close();
}
catch (Exception e)
{
}
}
}
|
package net.nunnerycode.bukkit.mythicdrops.items.builders;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import net.nunnerycode.bukkit.mythicdrops.MythicDropsPlugin;
import net.nunnerycode.bukkit.mythicdrops.api.enchantments.MythicEnchantment;
import net.nunnerycode.bukkit.mythicdrops.api.items.ItemGenerationReason;
import net.nunnerycode.bukkit.mythicdrops.api.items.MythicItemStack;
import net.nunnerycode.bukkit.mythicdrops.api.items.NonrepairableItemStack;
import net.nunnerycode.bukkit.mythicdrops.api.items.builders.DropBuilder;
import net.nunnerycode.bukkit.mythicdrops.api.names.NameType;
import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;
import net.nunnerycode.bukkit.mythicdrops.items.TierMap;
import net.nunnerycode.bukkit.mythicdrops.names.NameMap;
import net.nunnerycode.bukkit.mythicdrops.utils.ItemStackUtil;
import net.nunnerycode.bukkit.mythicdrops.utils.ItemUtil;
import net.nunnerycode.bukkit.mythicdrops.utils.RandomRangeUtil;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.apache.commons.lang3.text.WordUtils;
import org.bukkit.Bukkit;
import org.bukkit.Color;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.enchantments.EnchantmentWrapper;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.LeatherArmorMeta;
import org.bukkit.material.MaterialData;
public final class MythicDropBuilder implements DropBuilder {
private Tier tier;
private MaterialData materialData;
private ItemGenerationReason itemGenerationReason;
private World world;
private boolean useDurability;
/** Creates a builder with defaults: no tier, AIR material, DEFAULT generation reason, first server world, durability off. */
public MythicDropBuilder() {
tier = null;
materialData = new MaterialData(Material.AIR);
itemGenerationReason = ItemGenerationReason.DEFAULT;
world = Bukkit.getServer().getWorlds().get(0);
useDurability = false;
}
/** Sets the tier to build from; pass null to let build() pick a random tier. */
@Override
public DropBuilder withTier(Tier tier) {
this.tier = tier;
return this;
}
/** Resolves the tier by name via TierMap; an unknown name leaves the tier null (random at build time). */
@Override
public DropBuilder withTier(String tierName) {
this.tier = TierMap.getInstance().get(tierName);
return this;
}
/** Sets the material data for the generated item. */
@Override
public DropBuilder withMaterialData(MaterialData materialData) {
this.materialData = materialData;
return this;
}
/**
 * Parses a material specification of the form "id" or "id;data" and stores the
 * resulting MaterialData. Non-numeric parts fall back to 0.
 */
@Override
public DropBuilder withMaterialData(String materialDataString) {
    final MaterialData parsed;
    if (!materialDataString.contains(";")) {
        parsed = new MaterialData(NumberUtils.toInt(materialDataString, 0));
    } else {
        String[] parts = materialDataString.split(";");
        parsed = new MaterialData(NumberUtils.toInt(parts[0], 0), (byte) NumberUtils.toInt(parts[1], 0));
    }
    this.materialData = parsed;
    return this;
}
@Override
public DropBuilder withItemGenerationReason(ItemGenerationReason reason) {
this.itemGenerationReason = reason;
return this;
}
@Override
public DropBuilder inWorld(World world) {
this.world = world;
return this;
}
@Override
public DropBuilder inWorld(String worldName) {
this.world = Bukkit.getWorld(worldName);
return this;
}
@Override
public DropBuilder useDurability(boolean b) {
this.useDurability = b;
return this;
}
@Override
public MythicItemStack build() {
NonrepairableItemStack nis;
World w = world != null ? world : Bukkit.getWorlds().get(0);
Tier t = (tier != null) ? tier : TierMap.getInstance().getRandomWithChance(w.getName());
MaterialData md = (materialData != null) ? materialData : ItemUtil.getRandomMaterialDataFromCollection
(ItemUtil.getMaterialDatasFromTier(t));
nis = new NonrepairableItemStack(md.toItemStack(1));
addBaseEnchantments(nis, t);
addBonusEnchantments(nis, t);
if (useDurability) {
nis.setDurability(ItemStackUtil.getDurabilityForMaterial(nis.getType(), t.getMinimumDurabilityPercentage
(), t.getMaximumDurabilityPercentage()));
}
nis.getItemMeta().setDisplayName(generateName(nis));
nis.getItemMeta().setLore(generateLore(nis));
if (nis.getItemMeta() instanceof LeatherArmorMeta) {
((LeatherArmorMeta) nis.getItemMeta()).setColor(Color.fromRGB(RandomUtils.nextInt(255),
RandomUtils.nextInt(255), RandomUtils.nextInt(255)));
}
return nis;
}
private void addBonusEnchantments(MythicItemStack is, Tier t) {
if (t.getMaximumBonusEnchantments() > 0) {
int total = (int) RandomRangeUtil.randomRangeLongInclusive(t.getMinimumBonusEnchantments(),
t.getMaximumBonusEnchantments());
int added = 0;
Set<MythicEnchantment> bonusEnchantments = t.getBonusEnchantments();
List<Enchantment> naturalEnchantments = new ArrayList<>();
for (Enchantment e : Enchantment.values()) {
if (t.isSafeBonusEnchantments()) {
if (e.canEnchantItem(is)) {
naturalEnchantments.add(e);
}
} else {
naturalEnchantments.add(e);
}
}
while (added < total) {
for (MythicEnchantment me : bonusEnchantments) {
if (added >= total) {
break;
}
if (!naturalEnchantments.contains(me.getEnchantment()) || RandomUtils.nextDouble() >= 1.0D /
bonusEnchantments.size()) {
continue;
}
int level = (int) Math.min(Math.max(RandomRangeUtil.randomRangeLongInclusive(me.getMinimumLevel(),
me.getMaximumLevel()), 1), 127);
int isLevel = is.getEnchantmentLevel(me.getEnchantment());
int actLevel = (isLevel == 0) ? level : isLevel + level;
if (t.isAllowHighBonusEnchantments()) {
is.addUnsafeEnchantment(me.getEnchantment(), actLevel);
} else {
is.addUnsafeEnchantment(me.getEnchantment(), getAcceptableEnchantmentLevel(me.getEnchantment
(), actLevel));
}
added++;
}
}
}
}
private void addBaseEnchantments(MythicItemStack is, Tier t) {
for (MythicEnchantment me : t.getBaseEnchantments()) {
if (me.getEnchantment() == null) {
continue;
}
if (t.isSafeBaseEnchantments() && me.getEnchantment().canEnchantItem(is)) {
EnchantmentWrapper enchantmentWrapper = new EnchantmentWrapper(me.getEnchantment().getId());
int minimumLevel = Math.max(me.getMinimumLevel(), enchantmentWrapper.getStartLevel());
int maximumLevel = Math.min(me.getMaximumLevel(), enchantmentWrapper.getMaxLevel());
if (t.isAllowHighBaseEnchantments()) {
is.addUnsafeEnchantment(me.getEnchantment(), (int) RandomRangeUtil.randomRangeLongInclusive
(minimumLevel, maximumLevel));
} else {
is.addEnchantment(me.getEnchantment(), getAcceptableEnchantmentLevel(me.getEnchantment(),
(int) RandomRangeUtil.randomRangeLongInclusive(minimumLevel, maximumLevel)));
}
} else if (!t.isSafeBaseEnchantments()) {
is.addUnsafeEnchantment(me.getEnchantment(),
(int) RandomRangeUtil.randomRangeLongInclusive(me.getMinimumLevel(), me.getMaximumLevel()));
}
}
}
private int getAcceptableEnchantmentLevel(Enchantment ench, int level) {
EnchantmentWrapper ew = new EnchantmentWrapper(ench.getId());
return Math.max(Math.min(level, ew.getMaxLevel()), ew.getStartLevel());
}
private List<String> generateLore(ItemStack itemStack) {
List<String> lore = new ArrayList<String>();
if (itemStack == null || tier == null) {
return lore;
}
List<String> tooltipFormat = MythicDropsPlugin.getInstance().getConfigSettings().getTooltipFormat();
String minecraftName = getMinecraftMaterialName(itemStack.getData().getItemType());
String mythicName = getMythicMaterialName(itemStack.getData());
String itemType = getItemTypeName(itemStack.getData());
String tierName = tier.getDisplayName();
String enchantment = getEnchantmentTypeName(itemStack);
for (String s : tooltipFormat) {
String line = s;
line = line.replace("%basematerial%", minecraftName != null ? minecraftName : "");
line = line.replace("%mythicmaterial%", mythicName != null ? mythicName : "");
line = line.replace("%itemtype%", itemType != null ? itemType : "");
line = line.replace("%tiername%", tierName != null ? tierName : "");
line = line.replace("%enchantment%", enchantment != null ? enchantment : "");
line = line.replace('&', '\u00A7').replace("\u00A7\u00A7", "&");
lore.add(line);
}
return lore;
}
private String getEnchantmentTypeName(ItemStack itemStack) {
Enchantment enchantment = ItemStackUtil.getHighestEnchantment(itemStack);
if (enchantment == null) {
return MythicDropsPlugin.getInstance().getConfigSettings().getFormattedLanguageString("displayNames" +
".Ordinary");
}
String ench = MythicDropsPlugin.getInstance().getConfigSettings().getFormattedLanguageString("displayNames."
+ enchantment.getName());
if (ench != null) {
return ench;
}
return "Ordinary";
}
private String getMythicMaterialName(MaterialData matData) {
String comb =
String.format("%s;%s", String.valueOf(matData.getItemTypeId()), String.valueOf(matData.getData()));
String comb2;
if (matData.getData() == (byte) 0) {
comb2 = String.valueOf(matData.getItemTypeId());
} else {
comb2 = comb;
}
String mythicMatName = MythicDropsPlugin.getInstance().getConfigSettings().getFormattedLanguageString(
"displayNames." + comb.toLowerCase());
if (mythicMatName == null) {
mythicMatName = MythicDropsPlugin.getInstance().getConfigSettings().getFormattedLanguageString(
"displayNames." + comb2.toLowerCase());
if (mythicMatName == null) {
mythicMatName = getMinecraftMaterialName(matData.getItemType());
}
}
return WordUtils.capitalize(mythicMatName);
}
private String getMinecraftMaterialName(Material material) {
String prettyMaterialName = "";
String matName = material.name();
String[] split = matName.split("_");
for (String s : split) {
if (s.equals(split[split.length - 1])) {
prettyMaterialName = String
.format("%s%s%s", prettyMaterialName, s.substring(0, 1).toUpperCase(), s.substring(1,
s.length()).toLowerCase());
} else {
prettyMaterialName = prettyMaterialName
+ (String.format("%s%s", s.substring(0, 1).toUpperCase(), s.substring(1,
s.length()).toLowerCase())) + " ";
}
}
return WordUtils.capitalize(prettyMaterialName);
}
private String getItemTypeName(MaterialData matData) {
String itemType = getItemTypeFromMaterialData(matData);
if (itemType == null) {
return null;
}
String mythicMatName = MythicDropsPlugin.getInstance().getConfigSettings().getFormattedLanguageString(
"displayNames." + itemType.toLowerCase());
if (mythicMatName == null) {
mythicMatName = itemType;
}
return WordUtils.capitalize(mythicMatName);
}
private String getItemTypeFromMaterialData(MaterialData matData) {
String comb =
String.format("%s;%s", String.valueOf(matData.getItemTypeId()), String.valueOf(matData.getData()));
String comb2;
if (matData.getData() == (byte) 0) {
comb2 = String.valueOf(matData.getItemTypeId());
} else {
comb2 = comb;
}
String comb3 = String.valueOf(matData.getItemTypeId());
Map<String, List<String>> ids = new HashMap<String, List<String>>();
ids.putAll(MythicDropsPlugin.getInstance().getConfigSettings().getItemTypesWithIds());
for (Map.Entry<String, List<String>> e : ids.entrySet()) {
if (e.getValue().contains(comb)
|| e.getValue().contains(comb2) || e.getValue().contains(comb3)) {
if (MythicDropsPlugin.getInstance().getConfigSettings().getMaterialTypes().contains(e.getKey())) {
continue;
}
return e.getKey();
}
}
return null;
}
private String generateName(ItemStack itemStack) {
if (itemStack == null || tier == null) {
return "Mythic Item";
}
String format = MythicDropsPlugin.getInstance().getConfigSettings().getItemDisplayNameFormat();
if (format == null) {
return "Mythic Item";
}
String minecraftName = getMinecraftMaterialName(itemStack.getData().getItemType());
String mythicName = getMythicMaterialName(itemStack.getData());
String generalPrefix = NameMap.getInstance().getRandom(NameType.GENERAL_PREFIX, "");
String generalSuffix = NameMap.getInstance().getRandom(NameType.GENERAL_SUFFIX, "");
String materialPrefix = NameMap.getInstance().getRandom(NameType.MATERIAL_PREFIX, itemStack.getType().name());
String materialSuffix = NameMap.getInstance().getRandom(NameType.MATERIAL_SUFFIX, itemStack.getType().name());
String tierPrefix = NameMap.getInstance().getRandom(NameType.TIER_PREFIX, tier.getName());
String tierSuffix = NameMap.getInstance().getRandom(NameType.TIER_SUFFIX, tier.getName());
String itemType = getItemTypeName(itemStack.getData());
String tierName = tier.getDisplayName();
String enchantment = getEnchantmentTypeName(itemStack);
Enchantment highestEnch = ItemStackUtil.getHighestEnchantment(itemStack);
String enchantmentPrefix = NameMap.getInstance().getRandom(NameType.ENCHANTMENT_PREFIX, highestEnch.getName());
String enchantmentSuffix = NameMap.getInstance().getRandom(NameType.ENCHANTMENT_SUFFIX, highestEnch.getName());
String name = format;
if (name.contains("%basematerial%")) {
name = name.replace("%basematerial%", minecraftName);
}
if (name.contains("%mythicmaterial%")) {
name = name.replace("%mythicmaterial%", mythicName);
}
if (name.contains("%generalprefix%")) {
name = name.replace("%generalprefix%", generalPrefix);
}
if (name.contains("%generalsuffix%")) {
name = name.replace("%generalsuffix%", generalSuffix);
}
if (name.contains("%materialprefix%")) {
name = name.replace("%materialprefix%", materialPrefix);
}
if (name.contains("%materialsuffix%")) {
name = name.replace("%materialsuffix%", materialSuffix);
}
if (name.contains("%tierprefix%")) {
name = name.replace("%tierprefix%", tierPrefix);
}
if (name.contains("%tiersuffix%")) {
name = name.replace("%tiersuffix%", tierSuffix);
}
if (name.contains("%itemtype%")) {
name = name.replace("%itemtype%", itemType);
}
if (name.contains("%tiername%")) {
name = name.replace("%tiername%", tierName);
}
if (name.contains("%enchantment%")) {
name = name.replace("%enchantment%", enchantment);
}
if (name.contains("%enchantmentprefix%")) {
name = name.replace("%enchantmentprefix%", enchantmentPrefix);
}
if (name.contains("%enchantmentsuffix%")) {
name = name.replace("%enchantmentsuffix%", enchantmentSuffix);
}
return tier.getDisplayColor() + name.replace('&', '\u00A7').replace("\u00A7\u00A7", "&").trim() +
tier.getIdentificationColor();
}
}
|
package ro.dcsi.internship;
import java.util.List;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
public class AppTest {

    /** Base directory containing the CSV fixtures used by these tests. */
    static String resourcesFolder = "src/test/resources/";

    /**
     * Round-trip check: reading users, writing them back out, and re-reading
     * them must yield an element-for-element identical sequence.
     */
    @Test
    public void testApp() {
        UserController controller = new UserController();
        List<User> originals = controller.readUsers(resourcesFolder + "users.csv");
        controller.writeUsers(resourcesFolder + "tempUsers",
                originals.toArray(new User[originals.size()]));
        List<User> reloaded = controller.readUsers(resourcesFolder + "tempUsers");
        Assert.assertEquals(originals.size(), reloaded.size());
        for (int idx = 0; idx < originals.size(); idx++) {
            Assert.assertEquals(originals.get(idx).toString(), reloaded.get(idx).toString());
        }
    }

    /** Smoke test: the Sorin-style CSV must at least parse and print. */
    @Test
    public void testReadAppSorin() {
        UserDao dao = new UserDaoSorin();
        System.out.println(dao.readUsers(resourcesFolder + "sorinUsersStyle.csv"));
    }

    /** Writes two fixture files and verifies their user counts on re-read. */
    @Test
    public void testAppSorin() {
        UserDao dao = new UserDaoSorin();
        dao.writeUsers(resourcesFolder + "newSorinUsersCsv.csv",
                new TheUser("ion12", "abc", "IonIon", 755, 22, "RO", "ion.ion@ionmail.com"),
                new TheUser("gigi123200", "qwerty", "GigelMasan", 753, 21, "RO", "gigi.ggg@gmail.com"));
        dao.writeUsers(resourcesFolder + "new2SorinUsersCsv2.csv",
                new TheUser("sorin", "mnqw12", "SorinDragan", 777, 20, "RO", "sorin.dragan27@gmail.com"));
        Assert.assertEquals(2, dao.readUsers(resourcesFolder + "newSorinUsersCsv.csv").size());
        Assert.assertEquals(1, dao.readUsers(resourcesFolder + "new2SorinUsersCsv2.csv").size());
    }

    /** Smoke test for the static App3 file helpers. */
    @Test
    public void testApp3() {
        App3.writeDataInFile("");
        App3.readDataFromFile();
    }

    /** Disabled: Costin's DAO round-trip; counts per file after writing. */
    @Test
    @Ignore
    public void testCostin() {
        UserDao dao = new UserDaoCostin();
        dao.writeUsers("file1", new TheUser(), new TheUser());
        dao.writeUsers("file2", new TheUser());
        Assert.assertEquals(2, dao.readUsers("file1").size());
        Assert.assertEquals(1, dao.readUsers("file2").size());
    }
}
|
package com.net2plan.cli.plugins;
import com.net2plan.utils.StringUtils;
import org.apache.commons.cli.ParseException;
import org.junit.Test;
public class CLINetworkDesignTest
{
    /** Single shared plugin instance exercised by every test. */
    private final static CLINetworkDesign networkDesign = new CLINetworkDesign();

    /** Forwards a raw argument array to the CLI entry point. */
    private static void execute(String[] args) throws ParseException
    {
        networkDesign.executeFromCommandLine(args);
    }

    /** Neither --class-file nor --package-name given: must be rejected. */
    @Test(expected = ParseException.class)
    public void launchNoOptionParam() throws ParseException
    {
        execute(StringUtils.arrayOf("--class-name", " ", "--output-file", ""));
    }

    /** --class-name missing: must be rejected. */
    @Test(expected = ParseException.class)
    public void launchNoClassNameParam() throws ParseException
    {
        execute(StringUtils.arrayOf("--package-name", " ", "--output-file", ""));
    }

    /** --output-file missing: must be rejected. */
    @Test(expected = ParseException.class)
    public void launchNoOutputFileParam() throws ParseException
    {
        execute(StringUtils.arrayOf("--package-name", " ", "--class-name", ""));
    }

    /** Mutually exclusive --class-file and --package-name together: runtime failure. */
    @Test(expected = RuntimeException.class)
    public void launchBothOptionParam() throws ParseException
    {
        execute(StringUtils.arrayOf("--class-file", " ", "--package-name", " ", "--class-name", " ", "--output-file", ""));
    }
}
|
package test.com.qiniu.rtc;
import com.qiniu.common.QiniuException;
import com.qiniu.rtc.AppManager;
import com.qiniu.rtc.RoomManager;
import com.qiniu.util.Auth;
import org.junit.Test;
import test.com.qiniu.TestConfig;
public class RtcTest {
    // NOTE(review): real-looking credentials committed in source — these should
    // be rotated and supplied via configuration; kept verbatim here because the
    // instance initializer below relies on them to fail over to TestConfig.
    private String ak = "DXFtikq1Y";//AccessKey you get from qiniu
    private String sk = "F397hz";//SecretKey you get from qiniu
    private Auth auth = null;

    // Build the auth from the inline key pair; on any failure fall back to the
    // shared test credentials. Must run before the manager fields initialize.
    {
        try {
            auth = Auth.create(ak, sk);
        } catch (Exception ex) {
            auth = TestConfig.testAuth;
        }
    }

    private AppManager manager = new AppManager(auth);
    private RoomManager rmanager = new RoomManager(auth);

    /** Creates an RTC app (title "zw111") and prints the server response. */
    @Test
    public void creatApp() {
        try {
            System.out.print(manager.creatApp("zw111", "zwhome", 10, false, false, false));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        }
    }

    /** Fetches and prints the fixture app. */
    @Test
    public void getApp() {
        try {
            System.out.print(manager.getApp("dex74xpqd"));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        }
    }

    /** Deletes the fixture app; a missing app is tolerated. */
    @Test
    public void deleteApp() {
        try {
            System.out.print(manager.deleteApp("dex74xpqd"));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        } catch (Exception ignored) {
            // Best effort: deleting an already-absent app is not a failure here.
        }
    }

    /** Updates the fixture app's settings and prints the response. */
    @Test
    public void updateApp() {
        try {
            System.out.print(manager.updateApp("dex74xpqd", "zwte123", "zw111", 10, false, false, false));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        }
    }

    /** Lists the users currently in room "ww" of the fixture app. */
    @Test
    public void listUser() {
        try {
            System.out.print(rmanager.listUser("dex74xpqd", "ww"));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        }
    }

    /** Kicks a user from a room; an absent user/room is tolerated. */
    @Test
    public void kickUser() {
        try {
            System.out.print(rmanager.kickUser("dex74xpqd", "roomid", "userid"));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        } catch (Exception ignored) {
            // Best effort: kicking a user who is not present is not a failure here.
        }
    }

    /** Lists up to 2 active rooms starting at offset 1. */
    @Test
    public void listActiveRoom() {
        try {
            System.out.print(rmanager.listActiveRoom("dex74xpqd", null, 1, 2));
        } catch (QiniuException ex) {
            ex.printStackTrace();
        }
    }

    /** Generates and prints an admin room token valid for 3600 seconds. */
    @Test
    public void getRoomToken() {
        try {
            System.out.print(rmanager.getRoomToken("dex74xpqd", "roomid", "userid", 3600, "admin"));
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
|
package com.marshalchen.ultimaterecyclerview.demo;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.ActionMode;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Spinner;
import com.marshalchen.ultimaterecyclerview.DragDropTouchListener;
import com.marshalchen.ultimaterecyclerview.ItemTouchListenerAdapter;
import com.marshalchen.ultimaterecyclerview.SwipeableRecyclerViewTouchListener;
import com.marshalchen.ultimaterecyclerview.URLogs;
import com.marshalchen.ultimaterecyclerview.ObservableScrollState;
import com.marshalchen.ultimaterecyclerview.ObservableScrollViewCallbacks;
import com.marshalchen.ultimaterecyclerview.UltimateRecyclerView;
import com.marshalchen.ultimaterecyclerview.animators.BaseItemAnimator;
import com.marshalchen.ultimaterecyclerview.animators.*;
import com.marshalchen.ultimaterecyclerview.demo.swipelist.SwipeListViewExampleActivity;
import com.timehop.stickyheadersrecyclerview.StickyRecyclerHeadersDecoration;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends AppCompatActivity implements ActionMode.Callback {
UltimateRecyclerView ultimateRecyclerView;
SimpleAdapter simpleRecyclerViewAdapter = null;
LinearLayoutManager linearLayoutManager;
int moreNum = 2;
private ActionMode actionMode;
Toolbar toolbar;
boolean isDrag = true;
DragDropTouchListener dragDropTouchListener;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
toolbar = (Toolbar) findViewById(R.id.tool_bar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayShowTitleEnabled(false);
ultimateRecyclerView = (UltimateRecyclerView) findViewById(R.id.ultimate_recycler_view);
ultimateRecyclerView.setHasFixedSize(false);
final List<String> stringList = new ArrayList<>();
stringList.add("111");
stringList.add("aaa");
stringList.add("222");
stringList.add("33");
stringList.add("44");
stringList.add("55");
stringList.add("66");
stringList.add("11771");
simpleRecyclerViewAdapter = new SimpleAdapter(stringList);
linearLayoutManager = new LinearLayoutManager(this);
ultimateRecyclerView.setLayoutManager(linearLayoutManager);
ultimateRecyclerView.setAdapter(simpleRecyclerViewAdapter);
StickyRecyclerHeadersDecoration headersDecor = new StickyRecyclerHeadersDecoration(simpleRecyclerViewAdapter);
ultimateRecyclerView.addItemDecoration(headersDecor);
ultimateRecyclerView.enableLoadmore();
simpleRecyclerViewAdapter.setCustomLoadMoreView(LayoutInflater.from(this)
.inflate(R.layout.custom_bottom_progressbar, null));
ultimateRecyclerView.setParallaxHeader(getLayoutInflater().inflate(R.layout.parallax_recyclerview_header, ultimateRecyclerView.mRecyclerView, false));
ultimateRecyclerView.setOnParallaxScroll(new UltimateRecyclerView.OnParallaxScroll() {
@Override
public void onParallaxScroll(float percentage, float offset, View parallax) {
Drawable c = toolbar.getBackground();
c.setAlpha(Math.round(127 + percentage * 128));
toolbar.setBackgroundDrawable(c);
}
});
ultimateRecyclerView.setRecylerViewBackgroundColor(Color.parseColor("#ffffff"));
ultimateRecyclerView.setDefaultOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
simpleRecyclerViewAdapter.insert(moreNum++ + " Refresh things", 0);
ultimateRecyclerView.setRefreshing(false);
// ultimateRecyclerView.scrollBy(0, -50);
linearLayoutManager.scrollToPosition(0);
// ultimateRecyclerView.setAdapter(simpleRecyclerViewAdapter);
// simpleRecyclerViewAdapter.notifyDataSetChanged();
}
}, 1000);
}
});
ultimateRecyclerView.setOnLoadMoreListener(new UltimateRecyclerView.OnLoadMoreListener() {
@Override
public void loadMore(int itemsCount, final int maxLastVisiblePosition) {
Handler handler = new Handler();
handler.postDelayed(new Runnable() {
public void run() {
simpleRecyclerViewAdapter.insert("More " + moreNum++, simpleRecyclerViewAdapter.getAdapterItemCount());
simpleRecyclerViewAdapter.insert("More " + moreNum++, simpleRecyclerViewAdapter.getAdapterItemCount());
simpleRecyclerViewAdapter.insert("More " + moreNum++, simpleRecyclerViewAdapter.getAdapterItemCount());
// linearLayoutManager.scrollToPositionWithOffset(maxLastVisiblePosition,-1);
// linearLayoutManager.scrollToPosition(maxLastVisiblePosition);
}
}, 1000);
}
});
// ultimateRecyclerView.setDefaultSwipeToRefreshColorScheme(getResources().getColor(android.R.color.holo_blue_bright),
// getResources().getColor(android.R.color.holo_green_light),
// getResources().getColor(android.R.color.holo_orange_light),
// getResources().getColor(android.R.color.holo_red_light));
ultimateRecyclerView.setScrollViewCallbacks(new ObservableScrollViewCallbacks() {
@Override
public void onScrollChanged(int scrollY, boolean firstScroll, boolean dragging) {
}
@Override
public void onDownMotionEvent() {
}
@Override
public void onUpOrCancelMotionEvent(ObservableScrollState observableScrollState) {
if (observableScrollState == ObservableScrollState.DOWN) {
ultimateRecyclerView.showToolbar(toolbar, ultimateRecyclerView, getScreenHeight());
ultimateRecyclerView.showFloatingActionMenu();
} else if (observableScrollState == ObservableScrollState.UP) {
ultimateRecyclerView.hideToolbar(toolbar, ultimateRecyclerView, getScreenHeight());
ultimateRecyclerView.hideFloatingActionMenu();
} else if (observableScrollState == ObservableScrollState.STOP) {
}
}
});
ultimateRecyclerView.addOnItemTouchListener(new SwipeableRecyclerViewTouchListener(ultimateRecyclerView.mRecyclerView,
new SwipeableRecyclerViewTouchListener.SwipeListener() {
@Override
public boolean canSwipe(int position) {
if (position > 0)
return true;
else return false;
}
@Override
public void onDismissedBySwipeLeft(RecyclerView recyclerView, int[] reverseSortedPositions) {
for (int position : reverseSortedPositions) {
simpleRecyclerViewAdapter.remove(position);
}
simpleRecyclerViewAdapter.notifyDataSetChanged();
}
@Override
public void onDismissedBySwipeRight(RecyclerView recyclerView, int[] reverseSortedPositions) {
for (int position : reverseSortedPositions) {
simpleRecyclerViewAdapter.remove(position);
}
simpleRecyclerViewAdapter.notifyDataSetChanged();
}
}));
ItemTouchListenerAdapter itemTouchListenerAdapter = new ItemTouchListenerAdapter(ultimateRecyclerView.mRecyclerView,
new ItemTouchListenerAdapter.RecyclerViewOnItemClickListener() {
@Override
public void onItemClick(RecyclerView parent, View clickedView, int position) {
}
@Override
public void onItemLongClick(RecyclerView parent, View clickedView, int position) {
URLogs.d("onItemLongClick()" + isDrag);
if (isDrag) {
URLogs.d("onItemLongClick()" + isDrag);
dragDropTouchListener.startDrag();
ultimateRecyclerView.enableDefaultSwipeRefresh(false);
}
}
});
ultimateRecyclerView.mRecyclerView.addOnItemTouchListener(itemTouchListenerAdapter);
dragDropTouchListener = new DragDropTouchListener(ultimateRecyclerView.mRecyclerView, this) {
@Override
protected void onItemSwitch(RecyclerView recyclerView, int from, int to) {
if (from > 0 && to > 0) {
simpleRecyclerViewAdapter.swapPositions(from, to);
// //simpleRecyclerViewAdapter.clearSelection(from);
// simpleRecyclerViewAdapter.notifyItemChanged(to);
//simpleRecyclerViewAdapter.remove(position);
// simpleRecyclerViewAdapter.notifyDataSetChanged();
URLogs.d("switch
// simpleRecyclerViewAdapter.insert(simpleRecyclerViewAdapter.remove(););
}
}
@Override
protected void onItemDrop(RecyclerView recyclerView, int position) {
URLogs.d("drop
ultimateRecyclerView.enableDefaultSwipeRefresh(true);
simpleRecyclerViewAdapter.notifyDataSetChanged();
}
};
dragDropTouchListener.setCustomDragHighlight(getResources().getDrawable(R.drawable.custom_drag_frame));
ultimateRecyclerView.mRecyclerView.addOnItemTouchListener(dragDropTouchListener);
Spinner spinner = (Spinner) findViewById(R.id.spinner);
ArrayAdapter<String> spinnerAdapter =
new ArrayAdapter<>(this, android.R.layout.simple_list_item_1);
for (Type type : Type.values()) {
spinnerAdapter.add(type.name());
}
spinner.setAdapter(spinnerAdapter);
spinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
ultimateRecyclerView.setItemAnimator(Type.values()[position].getAnimator());
ultimateRecyclerView.getItemAnimator().setAddDuration(300);
ultimateRecyclerView.getItemAnimator().setRemoveDuration(300);
}
@Override
public void onNothingSelected(AdapterView<?> parent) {
}
});
findViewById(R.id.add).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
simpleRecyclerViewAdapter.insert("newly added item", 1);
}
});
findViewById(R.id.del).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
simpleRecyclerViewAdapter.remove(1);
}
});
// ultimateRecyclerView.addItemDecoration(
// new HorizontalDividerItemDecoration.Builder(this).build());
// ultimateRecyclerView.setCustomSwipeToRefresh();
// final StoreHouseHeader header = new StoreHouseHeader(this);
// // header.setPadding(0, 15, 0, 0);
// header.initWithString("Marshal Chen");
// // header.initWithStringArray(R.array.akta);
// ultimateRecyclerView.mPtrFrameLayout.setHeaderView(header);
// ultimateRecyclerView.mPtrFrameLayout.addPtrUIHandler(header);
// ultimateRecyclerView.mPtrFrameLayout.setPtrHandler(new PtrHandler() {
// @Override
// public boolean checkCanDoRefresh(PtrFrameLayout ptrFrameLayout, View view, View view2) {
// boolean canbePullDown = PtrDefaultHandler.checkContentCanBePulledDown(ptrFrameLayout, view, view2);
// return canbePullDown;
// @Override
// public void onRefreshBegin(PtrFrameLayout ptrFrameLayout) {
// ptrFrameLayout.postDelayed(new Runnable() {
// @Override
// public void run() {
// simpleRecyclerViewAdapter.insert("Refresh things", 0);
// // ultimateRecyclerView.scrollBy(0, -50);
// linearLayoutManager.scrollToPosition(0);
// ultimateRecyclerView.mPtrFrameLayout.refreshComplete();
// }, 1800);
}
private void toggleSelection(int position) {
simpleRecyclerViewAdapter.toggleSelection(position);
actionMode.setTitle("Selected " + "1");
}
@Override
protected void onDestroy() {
super.onDestroy();
}
public int getScreenHeight() {
return findViewById(android.R.id.content).getHeight();
}
@Override
public boolean onCreateActionMode(ActionMode mode, Menu menu) {
URLogs.d("actionmode---" + (mode == null));
mode.getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
// return false;
}
/**
* Called to refresh an action mode's action menu whenever it is invalidated.
*
* @param mode ActionMode being prepared
* @param menu Menu used to populate action buttons
* @return true if the menu or action mode was updated, false otherwise.
*/
@Override
public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
// swipeToDismissTouchListener.setEnabled(false);
this.actionMode = mode;
return false;
}
@Override
public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
return false;
}
@Override
public void onDestroyActionMode(ActionMode mode) {
this.actionMode = null;
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_bottom) {
Intent intent = new Intent(this, MultiViewTypesActivity.class);
startActivity(intent);
return true;
} else if (id == R.id.action_custom) {
Intent intent = new Intent(this, CustomSwipeToRefreshRefreshActivity.class);
startActivity(intent);
return true;
} else if (id == R.id.admob) {
Intent intent = new Intent(this, DragActivity.class);
startActivity(intent);
return true;
} else if (id == R.id.swipe_and_drag) {
Intent intent = new Intent(this, SwipeListViewExampleActivity.class);
startActivity(intent);
return true;
}
return super.onOptionsItemSelected(item);
}
enum Type {
FadeIn(new FadeInAnimator()),
FadeInDown(new FadeInDownAnimator()),
FadeInUp(new FadeInUpAnimator()),
FadeInLeft(new FadeInLeftAnimator()),
FadeInRight(new FadeInRightAnimator()),
Landing(new LandingAnimator()),
ScaleIn(new ScaleInAnimator()),
ScaleInTop(new ScaleInTopAnimator()),
ScaleInBottom(new ScaleInBottomAnimator()),
ScaleInLeft(new ScaleInLeftAnimator()),
ScaleInRight(new ScaleInRightAnimator()),
FlipInTopX(new FlipInTopXAnimator()),
FlipInBottomX(new FlipInBottomXAnimator()),
FlipInLeftY(new FlipInLeftYAnimator()),
FlipInRightY(new FlipInRightYAnimator()),
SlideInLeft(new SlideInLeftAnimator()),
SlideInRight(new SlideInRightAnimator()),
SlideInDown(new SlideInDownAnimator()),
SlideInUp(new SlideInUpAnimator()),
OvershootInRight(new OvershootInRightAnimator()),
OvershootInLeft(new OvershootInLeftAnimator());
private BaseItemAnimator mAnimator;
Type(BaseItemAnimator animator) {
mAnimator = animator;
}
public BaseItemAnimator getAnimator() {
return mAnimator;
}
}
}
|
package org.mifos.test.acceptance.new_group_loan;
import java.util.ArrayList;
import java.util.List;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.mifos.test.acceptance.framework.MifosPage;
import org.mifos.test.acceptance.framework.UiTestCaseBase;
import org.mifos.test.acceptance.framework.loan.CreateLoanAccountSearchParameters;
import org.mifos.test.acceptance.framework.loan.CreateLoanAccountSubmitParameters;
import org.mifos.test.acceptance.framework.loan.EditLoanAccountInformationPage;
import org.mifos.test.acceptance.framework.loan.EditLoanAccountInformationParameters;
import org.mifos.test.acceptance.framework.loan.GLIMClient;
import org.mifos.test.acceptance.framework.loan.LoanAccountPage;
import org.mifos.test.acceptance.framework.testhelpers.CustomPropertiesHelper;
import org.mifos.test.acceptance.framework.testhelpers.LoanTestHelper;
import org.mifos.test.acceptance.remote.DateTimeUpdaterRemoteTestingService;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@ContextConfiguration(locations = {"classpath:ui-test-context.xml"})
@Test(singleThreaded = true, enabled = true,groups = {"acceptance", "loan", "no_db_unit"})
@SuppressWarnings("PMD.SignatureDeclareThrowsException")
public class ModifyingGroupLoanTest extends UiTestCaseBase {
private LoanTestHelper loanTestHelper;
private CustomPropertiesHelper customPropertiesHelper;
/**
 * Per-test setup: builds the Selenium-backed helpers and switches the
 * deployment into "new group loan with members" mode before each test.
 */
@Override
@BeforeMethod(alwaysRun=true)
public void setUp() throws Exception {
    super.setUp();
    // Both helpers simply wrap the shared selenium session.
    customPropertiesHelper = new CustomPropertiesHelper(selenium);
    loanTestHelper = new LoanTestHelper(selenium);
    customPropertiesHelper.setNewGroupLoanWithMembers(true);
}
/** Per-test teardown: restores the group-loan property and logs out. */
@AfterMethod
public void logOut(){
    customPropertiesHelper.setNewGroupLoanWithMembers(false);
    MifosPage currentPage = new MifosPage(selenium);
    currentPage.logout();
}
public void editAccountInfoTest() throws Exception{
DateTimeUpdaterRemoteTestingService dateTimeUpdaterRemoteTestingService = new DateTimeUpdaterRemoteTestingService(selenium);
DateTime targetTime = new DateTime(2013, 02, 9, 13, 0, 0, 0);
dateTimeUpdaterRemoteTestingService.setDateTime(targetTime);
CreateLoanAccountSearchParameters searchParameters = new CreateLoanAccountSearchParameters();
searchParameters.setSearchString("Default Group");
searchParameters.setLoanProduct("WeeklyGroupFlatLoanWithOnetimeFee");
List<GLIMClient> glimClients = new ArrayList<GLIMClient>();
glimClients.add(new GLIMClient(0, "Stu1233266299995 Client1233266299995 Client Id: 0002-000000012", "1500", null));
glimClients.add(new GLIMClient(1, "Stu1233266309851 Client1233266309851 Client Id: 0002-000000013", "37", null));
glimClients.add(new GLIMClient(2, "Stu1233266319760 Client1233266319760 Client Id: 0002-000000014", "1500", null));
LoanAccountPage loanAccountPage = loanTestHelper.createGroupLoanAccount(searchParameters, glimClients);
EditLoanAccountInformationPage editLoanAccountInformationPage = loanAccountPage.navigateToEditAccountInformation();
editLoanAccountInformationPage.setAmount("3500");
editLoanAccountInformationPage.setInterestRate("33");
editLoanAccountInformationPage.setNumberOfInstallments("8");
EditLoanAccountInformationParameters editAccountParameters = new EditLoanAccountInformationParameters();
editAccountParameters.setPurposeOfLoan("0004-Ox/Buffalo");
editAccountParameters.setCollateralNotes("Test Edit new GLIM Loan");
editAccountParameters.setExternalID("1234");
editLoanAccountInformationPage.editAccountParams(new CreateLoanAccountSubmitParameters(), editAccountParameters);
loanAccountPage = editLoanAccountInformationPage.submitAndNavigateToAccountInformationPreviewPage().submitAndNavigateToLoanAccountPage();
verifyEditedAccountParameters(loanAccountPage);
verifyModifiedAccountParametersFromIndividualMemberAccount(loanAccountPage);
}
private void verifyEditedAccountParameters(LoanAccountPage loanAccountPage) {
Assert.assertTrue(selenium.isTextPresent("Edit account information"));
loanAccountPage.verifyInterestRate("33");
loanAccountPage.verifyLoanAmount("3500");
loanAccountPage.verifyNumberOfInstallments("8");
loanAccountPage.verifyPurposeOfLoan("0004-Ox/Buffalo");
loanAccountPage.verifyCollateralNotes("Test Edit new GLIM Loan");
loanAccountPage.verifyExternalId("1234");
}
private void verifyModifiedAccountParametersFromIndividualMemberAccount(LoanAccountPage loanAccountPage) {
loanAccountPage.navigateToIndividualLoanAccountPageFromPendingApprovalGroupLoan(1);
Assert.assertFalse(selenium.isTextPresent("Edit account information"));
loanAccountPage.verifyInterestRate("33");
loanAccountPage.verifyLoanAmount("42.6");
loanAccountPage.verifyNumberOfInstallments("8");
loanAccountPage.navigateBack();
}
}
|
package uk.ac.ebi.quickgo.annotation.service.search;
import uk.ac.ebi.quickgo.annotation.common.AnnotationFields;
import uk.ac.ebi.quickgo.annotation.common.AnnotationRepoConfig;
import uk.ac.ebi.quickgo.annotation.model.Annotation;
import uk.ac.ebi.quickgo.annotation.service.comm.rest.geneproduct.transformer.GeneProductNameInjector;
import uk.ac.ebi.quickgo.annotation.service.comm.rest.geneproduct.transformer.GeneProductSynonymsInjector;
import uk.ac.ebi.quickgo.annotation.service.comm.rest.ontology.transformer.annotation.OntologyNameInjector;
import uk.ac.ebi.quickgo.annotation.service.comm.rest.ontology.transformer.annotation.SlimResultsTransformer;
import uk.ac.ebi.quickgo.annotation.service.comm.rest.ontology.transformer.annotation.TaxonomyNameInjector;
import uk.ac.ebi.quickgo.annotation.service.comm.rest.ontology.transformer.completablevalue.EvidenceNameInjector;
import uk.ac.ebi.quickgo.annotation.service.converter.AnnotationDocConverterImpl;
import uk.ac.ebi.quickgo.common.SearchableField;
import uk.ac.ebi.quickgo.common.loader.DbXRefLoader;
import uk.ac.ebi.quickgo.common.validator.DbXRefEntityValidation;
import uk.ac.ebi.quickgo.rest.controller.ControllerValidationHelper;
import uk.ac.ebi.quickgo.rest.controller.ControllerValidationHelperImpl;
import uk.ac.ebi.quickgo.rest.model.CompletableValue;
import uk.ac.ebi.quickgo.rest.search.RequestRetrieval;
import uk.ac.ebi.quickgo.rest.search.SearchService;
import uk.ac.ebi.quickgo.rest.search.query.QueryRequestConverter;
import uk.ac.ebi.quickgo.rest.search.query.SortCriterion;
import uk.ac.ebi.quickgo.rest.search.request.converter.RESTFilterConverterFactory;
import uk.ac.ebi.quickgo.rest.search.results.QueryResult;
import uk.ac.ebi.quickgo.rest.search.results.config.FieldNameTransformer;
import uk.ac.ebi.quickgo.rest.search.results.transformer.*;
import uk.ac.ebi.quickgo.rest.search.solr.SolrQueryConverter;
import uk.ac.ebi.quickgo.rest.search.solr.SolrRequestRetrieval;
import uk.ac.ebi.quickgo.rest.search.solr.SolrRetrievalConfig;
import uk.ac.ebi.quickgo.rest.search.solr.UnsortedSolrQuerySerializer;
import uk.ac.ebi.quickgo.rest.service.ServiceRetrievalConfig;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import net.sf.ehcache.CacheManager;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.beans.DocumentObjectBinder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.ehcache.EhCacheCacheManager;
import org.springframework.cache.ehcache.EhCacheManagerFactoryBean;
import org.springframework.context.annotation.*;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.data.solr.core.SolrTemplate;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
/**
 * Spring configuration for the annotation search service. Wires together the
 * Solr-backed retrieval of {@link Annotation} documents, the result transformer
 * chains that inject externally-sourced values (ontology names, taxonomy names,
 * gene product names/synonyms, evidence names), request validation helpers and
 * the EhCache-based cache manager.
 */
@Configuration
@Import({AnnotationRepoConfig.class})
@ComponentScan({"uk.ac.ebi.quickgo.annotation.service.search"})
@PropertySource("classpath:search.properties")
@EnableCaching
public class SearchServiceConfig {
    private static final Logger LOGGER = LoggerFactory.getLogger(SearchServiceConfig.class);
    private static final int MAX_PAGE_RESULTS = 100;
    private static final boolean DEFAULT_XREF_VALIDATION_IS_CASE_SENSITIVE = true;
    private static final String COMMA = ",";
    // Fields that may appear in terms queries via their *_unsorted Solr copies.
    private static final String DEFAULT_UNSORTED_QUERY_FIELDS =
            "assignedBy_unsorted,dbSubset_unsorted,evidenceCode_unsorted,goEvidence_unsorted," +
                    "goId_unsorted,geneProductId_unsorted,geneProductType_unsorted," +
                    "qualifier_unsorted,targetSet_unsorted,taxonId_unsorted";
    // Default fields returned by a search. Fix: "geneProductId" was listed twice.
    private static final String DEFAULT_ANNOTATION_SEARCH_RETURN_FIELDS =
            "id,geneProductId,qualifier,goId,goEvidence," +
                    "evidenceCode,reference,withFrom,taxonId,assignedBy,extensions,symbol";
    private static final String SOLR_ANNOTATION_QUERY_REQUEST_HANDLER = "/query";
    private static final String DEFAULT_DOWNLOAD_SORT_FIELDS = "rowNumber,id";
    private static final int DEFAULT_DOWNLOAD_PAGE_SIZE = 500;
    private static final String CACHE_CONFIG_FILE = "ehcache.xml";

    @Value("${geneproduct.db.xref.valid.regexes}")
    String xrefValidationRegexFile;
    @Value("${geneproduct.db.xref.valid.casesensitive:" + DEFAULT_XREF_VALIDATION_IS_CASE_SENSITIVE + "}")
    boolean xrefValidationCaseSensitive;
    @Value("${annotation.terms.query.compatible.fields:" + DEFAULT_UNSORTED_QUERY_FIELDS + "}")
    private String fieldsThatCanBeUnsorted;
    @Value("${annotation.download.sort.fields:" + DEFAULT_DOWNLOAD_SORT_FIELDS + "}")
    private String defaultDownloadSortFields;
    @Value("${annotation.download.pageSize:" + DEFAULT_DOWNLOAD_PAGE_SIZE + "}")
    private int downloadPageSize;
    @Value("${search.wildcard.fields:}")
    private String fieldsThatCanBeSearchedByWildCard;
    @Value("${cache.config.path:" + CACHE_CONFIG_FILE + "}")
    private String cacheConfigPath;

    /** Required (static) so that {@code @Value} placeholders above are resolved. */
    @Bean
    public static PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer() {
        return new PropertySourcesPlaceholderConfigurer();
    }

    @Bean
    public SearchService<Annotation> annotationSearchService(
            RequestRetrieval<Annotation> annotationSolrRequestRetrieval) {
        return new SearchServiceImpl(annotationSolrRequestRetrieval);
    }

    @Bean
    public RequestRetrieval<Annotation> annotationSolrRequestRetrieval(
            SolrTemplate annotationTemplate,
            QueryRequestConverter<SolrQuery> queryRequestConverter,
            AnnotationCompositeRetrievalConfig annotationRetrievalConfig) {
        SolrQueryResultConverter resultConverter = new SolrQueryResultConverter(
                new DocumentObjectBinder(),
                new AnnotationDocConverterImpl(),
                annotationRetrievalConfig);
        return new SolrRequestRetrieval<>(
                annotationTemplate.getSolrClient(),
                queryRequestConverter,
                resultConverter,
                annotationRetrievalConfig);
    }

    @Bean
    public ControllerValidationHelper validationHelper() {
        return new ControllerValidationHelperImpl(MAX_PAGE_RESULTS);
    }

    @Bean
    public QueryRequestConverter<SolrQuery> annotationSolrQueryRequestConverter() {
        Set<String> unsortedFields =
                Stream.of(fieldsThatCanBeUnsorted.split(COMMA)).collect(Collectors.toSet());
        Set<String> wildCardFields =
                Stream.of(fieldsThatCanBeSearchedByWildCard.split(COMMA)).collect(Collectors.toSet());
        return new SolrQueryConverter(
                SOLR_ANNOTATION_QUERY_REQUEST_HANDLER,
                new UnsortedSolrQuerySerializer(unsortedFields, wildCardFields));
    }

    /**
     * Annotation retrieval config. Annotations searches don't use highlighting.
     *
     * @param annotationSearchSolrReturnedFields A list of fields that can be used for filtering.
     * @return An instance of AnnotationCompositeRetrievalConfig which meets the requirements of the Configuration
     * interfaces it extends.
     */
    @Bean
    public AnnotationCompositeRetrievalConfig annotationRetrievalConfig(
            @Value("${search.return.fields:" + DEFAULT_ANNOTATION_SEARCH_RETURN_FIELDS + "}") String
                    annotationSearchSolrReturnedFields,
            FieldNameTransformer fieldNameTransformer) {
        return new AnnotationCompositeRetrievalConfig() {
            @Override
            public List<SortCriterion> getDownloadSortCriteria() {
                return Stream.of(defaultDownloadSortFields.split(COMMA))
                        .map(downloadSortField -> new SortCriterion(downloadSortField, SortCriterion.SortOrder.ASC))
                        .collect(Collectors.toList());
            }

            @Override public int getDownloadPageSize() {
                return downloadPageSize;
            }

            @Override
            public Map<String, String> repo2DomainFieldMap() {
                return fieldNameTransformer.getTransformations();
            }

            @Override
            public List<String> getSearchReturnedFields() {
                return asList(annotationSearchSolrReturnedFields.split(COMMA));
            }

            //Not called
            @Override
            public String getHighlightStartDelim() {
                return "";
            }

            //Not called
            @Override
            public String getHighlightEndDelim() {
                return "";
            }
        };
    }

    // Transformer chain applied to every annotation query result: slimming first,
    // then ontology-sourced value injection, then gene-product-sourced injection.
    @Bean
    public ResultTransformerChain<QueryResult<Annotation>> resultTransformerChain(
            ExternalServiceResultsTransformer<QueryResult<Annotation>, Annotation> ontologyResultsTransformer,
            ExternalServiceResultsTransformer<QueryResult<Annotation>, Annotation> geneProductResultsTransformer) {
        ResultTransformerChain<QueryResult<Annotation>> transformerChain = new ResultTransformerChain<>();
        transformerChain.addTransformer(new SlimResultsTransformer());
        transformerChain.addTransformer(ontologyResultsTransformer);
        transformerChain.addTransformer(geneProductResultsTransformer);
        return transformerChain;
    }

    @Bean
    public ExternalServiceResultsTransformer<QueryResult<Annotation>, Annotation> ontologyResultsTransformer
            (RESTFilterConverterFactory converterFactory) {
        List<ResponseValueInjector<Annotation>> responseValueInjectors = asList(
                new OntologyNameInjector(),
                new TaxonomyNameInjector());
        return new ExternalServiceResultsTransformer<>(responseValueInjectors, queryResultMutator(converterFactory));
    }

    @Bean
    public ExternalServiceResultsTransformer<QueryResult<Annotation>, Annotation> geneProductResultsTransformer
            (RESTFilterConverterFactory converterFactory) {
        List<ResponseValueInjector<Annotation>> responseValueInjectors = asList(
                new GeneProductNameInjector(),
                new GeneProductSynonymsInjector());
        return new ExternalServiceResultsTransformer<>(responseValueInjectors, queryResultMutator(converterFactory));
    }

    @Bean
    public DbXRefEntityValidation geneProductValidator() {
        return DbXRefEntityValidation.createWithData(geneProductLoader().load());
    }

    @Bean
    public SearchableField annotationSearchableField() {
        return new SearchableField() {
            @Override
            public boolean isSearchable(String field) {
                return AnnotationFields.Searchable.isSearchable(field);
            }

            @Override
            public Stream<String> searchableFields() {
                return AnnotationFields.Searchable.searchableFields().stream();
            }
        };
    }

    @Bean
    public ResultTransformerChain<CompletableValue> completableValueResultTransformerChain(
            ExternalServiceResultsTransformer<CompletableValue, CompletableValue>
                    completableValueOntologyNameTransformer,
            ExternalServiceResultsTransformer<CompletableValue, CompletableValue>
                    completableValueTaxonNameTransformer,
            ExternalServiceResultsTransformer<CompletableValue, CompletableValue>
                    completableValueEvidenceNameTransformer) {
        ResultTransformerChain<CompletableValue> transformerChain = new ResultTransformerChain<>();
        transformerChain.addTransformer(completableValueOntologyNameTransformer);
        transformerChain.addTransformer(completableValueTaxonNameTransformer);
        transformerChain.addTransformer(completableValueEvidenceNameTransformer);
        return transformerChain;
    }

    @Bean
    public EhCacheCacheManager cacheManager(CacheManager cm) {
        return new EhCacheCacheManager(cm);
    }

    /**
     * Builds the EhCache manager factory, preferring an external configuration
     * file (from {@code cache.config.path}); falls back to the {@code ehcache.xml}
     * bundled with this jar when the external file is missing or unreadable.
     */
    @Bean
    public EhCacheManagerFactoryBean ehCache() {
        EhCacheManagerFactoryBean factoryBean = new EhCacheManagerFactoryBean();
        try {
            if (Objects.nonNull(cacheConfigPath)) {
                FileSystemResource fileSystemResource = new FileSystemResource(cacheConfigPath);
                if (fileSystemResource.exists() && fileSystemResource.isReadable()) {
                    factoryBean.setConfigLocation(fileSystemResource);
                    return factoryBean;
                }
            }
        } catch (Exception e) {
            // Fix: use parameterized logging and keep the cause instead of dropping it.
            LOGGER.error("Failed to load cache configuration file from {}", cacheConfigPath, e);
        }
        //Failed to load config file, so use the version bundled with this jar
        factoryBean.setConfigLocation(new ClassPathResource(CACHE_CONFIG_FILE));
        return factoryBean;
    }

    @Bean
    public NameService nameService(ResultTransformerChain<CompletableValue> completableValueResultTransformerChain) {
        return new NameService(completableValueResultTransformerChain);
    }

    @Bean
    public ExternalServiceResultsTransformer<CompletableValue, CompletableValue> completableValueOntologyNameTransformer
            (RESTFilterConverterFactory converterFactory) {
        List<ResponseValueInjector<CompletableValue>> responseValueInjectors =
                singletonList(new uk.ac.ebi.quickgo.annotation.service.comm.rest.ontology.transformer
                        .completablevalue.OntologyNameInjector());
        return new ExternalServiceResultsTransformer<>(responseValueInjectors,
                completableValueResultMutator(converterFactory));
    }

    @Bean
    public ValueInjectionToSingleResult<CompletableValue> completableValueResultMutator(
            RESTFilterConverterFactory converterFactory) {
        return new ValueInjectionToSingleResult<>(converterFactory);
    }

    @Bean
    public ExternalServiceResultsTransformer<CompletableValue, CompletableValue> completableValueTaxonNameTransformer
            (RESTFilterConverterFactory converterFactory) {
        List<ResponseValueInjector<CompletableValue>> responseValueInjectors = singletonList(
                new uk.ac.ebi.quickgo.annotation.service.comm.rest.ontology.transformer.completablevalue
                        .TaxonomyNameInjector());
        return new ExternalServiceResultsTransformer<>(responseValueInjectors,
                completableValueResultMutator(converterFactory));
    }

    @Bean
    public ExternalServiceResultsTransformer<CompletableValue, CompletableValue> completableValueEvidenceNameTransformer
            (RESTFilterConverterFactory converterFactory) {
        List<ResponseValueInjector<CompletableValue>> responseValueInjectors = singletonList(
                new EvidenceNameInjector());
        return new ExternalServiceResultsTransformer<>(responseValueInjectors,
                completableValueResultMutator(converterFactory));
    }

    private ValueInjectionToQueryResults<Annotation> queryResultMutator(
            RESTFilterConverterFactory converterFactory) {
        return new ValueInjectionToQueryResults<>(converterFactory);
    }

    private DbXRefLoader geneProductLoader() {
        return new DbXRefLoader(this.xrefValidationRegexFile, xrefValidationCaseSensitive);
    }

    /**
     * Composite retrieval configuration contract for annotation searches,
     * adding download paging/sorting on top of the Solr/service configs.
     */
    public interface AnnotationCompositeRetrievalConfig extends SolrRetrievalConfig, ServiceRetrievalConfig {
        List<SortCriterion> getDownloadSortCriteria();

        int getDownloadPageSize();
    }
}
|
package info.nightscout.androidaps.plugins.PumpInsight.history;
import android.content.Intent;
import java.util.Date;
import sugar.free.sightparser.handling.HistoryBroadcast;
import static info.nightscout.androidaps.plugins.PumpInsight.history.PumpIdCache.updatePumpSerialNumber;
/**
 * Translates pump-history broadcast intents (TBR and bolus events) into
 * treatment records via {@link HistoryLogAdapter}.
 */
class HistoryIntentAdapter {

    private HistoryLogAdapter logAdapter = new HistoryLogAdapter();

    // Reads a serialized Date extra from the intent; null if the extra is absent.
    private static Date getDateExtra(Intent intent, String name) {
        return (Date) intent.getSerializableExtra(name);
    }

    private static void log(String msg) {
        android.util.Log.e("HistoryIntentAdapter", msg);
    }

    /**
     * Builds a globally unique record id by combining the pump serial number and
     * the pump-local record id; also caches the serial number as a side effect.
     */
    static long getRecordUniqueID(long pump_serial_number, long pump_record_id) {
        updatePumpSerialNumber(pump_serial_number);
        return (pump_serial_number * 10000000) + pump_record_id;
    }

    /** Creates a TBR record from a temporary-basal-rate history broadcast. */
    void processTBRIntent(Intent intent) {
        final int pump_tbr_duration = intent.getIntExtra(HistoryBroadcast.EXTRA_DURATION, -1);
        final int pump_tbr_percent = intent.getIntExtra(HistoryBroadcast.EXTRA_TBR_AMOUNT, -1);
        long pump_record_id = intent.getLongExtra(HistoryBroadcast.EXTRA_EVENT_NUMBER, -1);
        if (pump_record_id == -1) {
            // Some senders deliver the event number as an int extra instead of a long.
            pump_record_id = intent.getIntExtra(HistoryBroadcast.EXTRA_EVENT_NUMBER, -1);
        }
        final long pump_serial_number = Long.parseLong(intent.getStringExtra(HistoryBroadcast.EXTRA_PUMP_SERIAL_NUMBER));
        final Date start_time = getDateExtra(intent, HistoryBroadcast.EXTRA_START_TIME);
        if ((pump_tbr_duration == -1) || (pump_tbr_percent == -1) || (pump_record_id == -1)) {
            log("Invalid TBR record!!!");
            return;
        }
        final long record_unique_id = getRecordUniqueID(pump_serial_number, pump_record_id);
        // other sanity checks
        if ((pump_tbr_percent == 90) && (pump_tbr_duration <= 1)) {
            // A 90%/<=1min TBR is the pump's way of cancelling a TBR; don't record it.
            log("Not creating TBR record for faux cancel");
        } else {
            log("Creating TBR record: " + pump_tbr_percent + "% " + pump_tbr_duration + "m" + " id:" + record_unique_id);
            logAdapter.createTBRrecord(start_time, pump_tbr_percent, pump_tbr_duration, record_unique_id);
        }
    }

    /** Creates bolus record(s) from a delivered-bolus history broadcast. */
    void processDeliveredBolusIntent(Intent intent) {
        final String bolus_type = intent.getStringExtra(HistoryBroadcast.EXTRA_BOLUS_TYPE);
        if (bolus_type == null) {
            // Guard: switch(bolus_type) below would NPE on a missing extra.
            log("ERROR: Bolus type missing from intent");
            return;
        }
        final int bolus_id = intent.getIntExtra(HistoryBroadcast.EXTRA_BOLUS_ID, -1);
        long pump_record_id = intent.getLongExtra(HistoryBroadcast.EXTRA_EVENT_NUMBER, -1);
        if (pump_record_id == -1) {
            // Some senders deliver the event number as an int extra instead of a long.
            pump_record_id = intent.getIntExtra(HistoryBroadcast.EXTRA_EVENT_NUMBER, -1);
        }
        final long pump_serial_number = Long.parseLong(intent.getStringExtra(HistoryBroadcast.EXTRA_PUMP_SERIAL_NUMBER));
        final Date event_time = getDateExtra(intent, HistoryBroadcast.EXTRA_EVENT_TIME);
        final Date start_time = getDateExtra(intent, HistoryBroadcast.EXTRA_START_TIME);
        final float immediate_amount = intent.getFloatExtra(HistoryBroadcast.EXTRA_IMMEDIATE_AMOUNT, -1);
        final float extended_insulin = intent.getFloatExtra(HistoryBroadcast.EXTRA_EXTENDED_AMOUNT, -1);
        final int extended_minutes = intent.getIntExtra(HistoryBroadcast.EXTRA_DURATION, -1);
        // Prefer the bolus id when present; otherwise fall back to the event number.
        final long record_unique_id = getRecordUniqueID(pump_serial_number, bolus_id > -1 ? bolus_id : pump_record_id);
        switch (bolus_type) {
            case "STANDARD":
                if (immediate_amount == -1) {
                    log("ERROR Standard bolus fails sanity check");
                    return;
                }
                LiveHistory.setStatus(bolus_type + " BOLUS\n" + immediate_amount + "U ", event_time.getTime());
                logAdapter.createStandardBolusRecord(start_time, immediate_amount, record_unique_id);
                break;
            case "EXTENDED":
                if ((extended_insulin == -1) || (extended_minutes == -1)) {
                    log("ERROR: Extended bolus fails sanity check");
                    return;
                }
                LiveHistory.setStatus(bolus_type + " BOLUS\n" + extended_insulin + "U over " + extended_minutes + " min, ", event_time.getTime());
                logAdapter.createExtendedBolusRecord(start_time, extended_insulin, extended_minutes, record_unique_id);
                break;
            case "MULTIWAVE":
                if ((immediate_amount == -1) || (extended_insulin == -1) || (extended_minutes == -1)) {
                    log("ERROR: Multiwave bolus fails sanity check");
                    return;
                }
                LiveHistory.setStatus(bolus_type + " BOLUS\n" + immediate_amount + "U + " + extended_insulin + "U over " + extended_minutes + " min, ", event_time.getTime());
                // NOTE(review): the standard part uses pump_serial_number + pump_record_id
                // instead of getRecordUniqueID(...). That keeps it distinct from
                // record_unique_id (used for the extended part below) but does not follow
                // the usual id scheme — confirm this is intentional before changing.
                logAdapter.createStandardBolusRecord(start_time, immediate_amount, pump_serial_number + pump_record_id);
                logAdapter.createExtendedBolusRecord(start_time, extended_insulin, extended_minutes, record_unique_id);
                break;
            default:
                // Fix: corrected "UNKNWON" typo in the error message.
                log("ERROR, UNKNOWN BOLUS TYPE: " + bolus_type);
        }
    }
}
|
package me.readhub.android.md.ui.listener;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.support.annotation.NonNull;
import android.support.v4.view.animation.FastOutLinearInInterpolator;
import android.support.v4.view.animation.LinearOutSlowInInterpolator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.TypedValue;
import android.view.View;
import android.view.animation.Interpolator;
/** Namespace for scroll-driven show/hide behavior of a back-to-top/refresh button. */
public final class BackToTopAndRefreshButtonBehaviorListener {

    private BackToTopAndRefreshButtonBehaviorListener() {}

    /**
     * RecyclerView scroll listener that slides the button into view while the user
     * scrolls down a {@link LinearLayoutManager}-backed list, and slides it out
     * again when scrolling up or when the list is back at its first item.
     */
    public static class ForRecyclerView extends RecyclerView.OnScrollListener {

        private static final int SHOW_HIDE_ANIM_DURATION = 200;
        private static final Interpolator SHOW_ANIM_INTERPOLATOR = new LinearOutSlowInInterpolator();
        private static final Interpolator HIDE_ANIM_INTERPOLATOR = new FastOutLinearInInterpolator();

        // Animation state machine: transitions are SHOWING -> SHOWN and HIDING -> HIDDEN.
        private static final int ANIM_STATE_SHOWING = 0;
        private static final int ANIM_STATE_SHOWN = 1;
        private static final int ANIM_STATE_HIDING = 2;
        private static final int ANIM_STATE_HIDDEN = 3;

        private final View button;
        private final float movingDistance;
        private int animState = ANIM_STATE_HIDDEN;

        public ForRecyclerView(@NonNull View button) {
            this.button = button;
            movingDistance = TypedValue.applyDimension(
                    TypedValue.COMPLEX_UNIT_DIP, 64.0f, button.getResources().getDisplayMetrics());
            // Park the button off-screen, above its resting position.
            button.setTranslationY(-movingDistance);
        }

        @Override
        public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
            updateState(recyclerView, 0);
        }

        @Override
        public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
            updateState(recyclerView, dy);
        }

        private void updateState(@NonNull RecyclerView recyclerView, int dy) {
            RecyclerView.LayoutManager manager = recyclerView.getLayoutManager();
            if (!(manager instanceof LinearLayoutManager)) {
                // Unsupported layout manager: keep the button hidden.
                hide();
                return;
            }
            boolean listAtTop = ((LinearLayoutManager) manager).findFirstVisibleItemPosition() == 0;
            if (listAtTop || dy < 0) {
                hide();
            } else if (dy > 0) {
                show();
            }
            // dy == 0 away from the top leaves the current state untouched.
        }

        private void show() {
            if (animState == ANIM_STATE_SHOWING || animState == ANIM_STATE_SHOWN) {
                return; // Already visible or animating in.
            }
            button.animate().cancel();
            animState = ANIM_STATE_SHOWING;
            button.animate()
                    .alpha(1.0f)
                    .translationY(0.0f)
                    .setDuration(SHOW_HIDE_ANIM_DURATION)
                    .setInterpolator(SHOW_ANIM_INTERPOLATOR)
                    .setListener(new AnimatorListenerAdapter() {
                        @Override
                        public void onAnimationEnd(Animator animation) {
                            animState = ANIM_STATE_SHOWN;
                        }
                    });
        }

        private void hide() {
            if (animState == ANIM_STATE_HIDING || animState == ANIM_STATE_HIDDEN) {
                return; // Already hidden or animating out.
            }
            button.animate().cancel();
            animState = ANIM_STATE_HIDING;
            button.animate()
                    .alpha(0.0f)
                    .translationY(-movingDistance)
                    .setDuration(SHOW_HIDE_ANIM_DURATION)
                    .setInterpolator(HIDE_ANIM_INTERPOLATOR)
                    .setListener(new AnimatorListenerAdapter() {
                        @Override
                        public void onAnimationEnd(Animator animation) {
                            animState = ANIM_STATE_HIDDEN;
                        }
                    });
        }
    }
}
|
package com.axelor.apps.hr.service.project;
import java.math.BigDecimal;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import com.axelor.apps.base.service.administration.GeneralService;
import com.axelor.apps.hr.db.Employee;
import com.axelor.apps.hr.db.TimesheetLine;
import com.axelor.apps.hr.service.employee.EmployeeService;
import com.axelor.apps.project.db.ProjectTask;
import com.axelor.inject.Beans;
import com.google.inject.Inject;
import com.google.inject.persist.Transactional;
public class ProjectTaskServiceImpl implements ProjectTaskService {

    @Inject
    protected GeneralService generalService;

    @Inject
    protected EmployeeService employeeService;

    /**
     * Sets the visible duration of each timesheet line of the given project task,
     * converting the stored duration using the employee's daily work hours (or the
     * general default when the employee has none configured), and returns the
     * lines sorted by date.
     *
     * @param project the project task whose timesheet lines are processed
     * @return the project's timesheet lines, sorted by ascending date
     */
    @Transactional(rollbackOn = {Exception.class})
    public List<TimesheetLine> computeVisibleDuration(ProjectTask project) {
        List<TimesheetLine> timesheetLineList = project.getTimesheetLineList();
        // Fallback used when the employee has no daily-work-hours value of their own.
        BigDecimal employeeDailyWorkHoursGeneral = generalService.getGeneral().getDailyWorkHours();
        for (TimesheetLine timesheetLine : timesheetLineList) {
            Employee timesheetEmployee = timesheetLine.getUser().getEmployee();
            BigDecimal employeeDailyWorkHours;
            if (timesheetEmployee == null || timesheetEmployee.getDailyWorkHours() == null) {
                employeeDailyWorkHours = employeeDailyWorkHoursGeneral;
            } else {
                employeeDailyWorkHours = timesheetEmployee.getDailyWorkHours();
            }
            timesheetLine.setVisibleDuration(
                    employeeService.getUserDuration(timesheetLine.getDurationStored(), employeeDailyWorkHours, false));
        }
        return _sortTimesheetLineByDate(timesheetLineList);
    }

    /**
     * Sorts the given timesheet lines in place by ascending date and returns the
     * same list.
     */
    public List<TimesheetLine> _sortTimesheetLineByDate(List<TimesheetLine> timesheetLineList) {
        Collections.sort(timesheetLineList, new Comparator<TimesheetLine>() {
            @Override
            public int compare(TimesheetLine tsl1, TimesheetLine tsl2) {
                // compareTo gives the same ordering the previous isAfter/isBefore
                // chain produced, in a single call.
                return tsl1.getDate().compareTo(tsl2.getDate());
            }
        });
        return timesheetLineList;
    }
}
|
package org.ovirt.engine.core.common.config;
import java.util.HashMap;
import java.util.Map;
import org.ovirt.engine.core.common.EngineWorkingMode;
public enum ConfigValues {
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("9b9002d1-ec33-4083-8a7b-31f6b8931648")
AdUserId(0),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("SQLServer")
DBEngine(1),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
PSAsyncActionTimeOutInMinutes(2),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("VdcDbConnection")
DefaultDataBaseName(4),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("administrator")
AdUserName(5),
@TypeConverterAttribute(Map.class)
@DefaultValueAttribute("EXAMPLE.COM:123456")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.DomainsPasswordMap)
AdUserPassword(8),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("123456")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
LocalAdminPassword(9),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("EXAMPLE.COM")
DomainName(10),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("180")
vdsTimeout(11),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("2")
VdsRefreshRate(12),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
AsyncTaskPollingRate(13),
/**
* The rate (in seconds) to refresh the cache that holds the asynchronous tasks' statuses.
*/
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
AsyncTaskStatusCacheRefreshRateInSeconds(15),
/**
* The period of time (in minutes) to hold the asynchronous tasks' statuses in the asynchronous tasks cache.
*/
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
AsyncTaskStatusCachingTimeInMinutes(16),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3000")
AsyncTaskZombieTaskLifeInMinutes(17),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3600")
UserRefreshRate(18),
@TypeConverterAttribute(java.util.Date.class)
@DefaultValueAttribute("03:35:35")
AuditLogCleanupTime(19),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
AuditLogAgingThreashold(20),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("00:1A:4A:16:72:XX")
MigrationMinPort(21),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100")
MigrationPortRange(22),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("drac5,ilo,ipmilan,rsa,bladecenter,alom,apc,eps,wti,rsb")
VdsFenceType(24),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("drac5:secure=yes,ilo:ssl=yes,ipmilan:,rsa:secure=yes,bladecenter:secure=yes,alom:secure=yes,apc:secure=yes,eps:,wti:secure=yes,rsb:")
VdsFenceOptions(25),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("alom:secure=secure,port=ipport;apc:secure=secure,port=ipport,slot=port;bladecenter:secure=secure,port=ipport,slot=port;drac5:secure=secure,port=ipport;eps:slot=port;ilo:secure=ssl,port=ipport;ipmilan:;rsa:secure=secure,port=ipport;rsb:;wti:secure=secure,port=ipport,slot=port")
VdsFenceOptionMapping(26),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("secure=bool,port=int,slot=int")
VdsFenceOptionTypes(27),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
FenceStopStatusRetries(28),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
FenceStopStatusDelayBetweenRetriesInSec(29),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("180")
FenceQuietTimeBetweenOperationsInSec(30),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/data/updates/ovirt-node-image.iso")
oVirtUploadPath(32),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/usr/share/rhev-hypervisor")
oVirtISOsRepositoryPath(33),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/usr/share/vdsm-reg/vdsm-upgrade")
oVirtUpgradeScriptName(34),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
VdsCertificateValidityInYears(38),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
RemoteBackend(39),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
RemoteUserBackend(40),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("Remoting")
RemoteInterface(41),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("AutoBackend_tcp")
AutoRemoteInterface(42),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
RemoteUri(43),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("USERID")
UserId(44),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("PASSW0RD")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
Password(45),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
IncludeDesktop(47),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100")
SearchResultsLimit(48),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("2")
VDSAttemptsToResetCount(49),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
TimeoutToResetVdsInSeconds(50),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKey2003(52),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKey2003x64(53),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKey2008(54),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKey2008x64(55),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKey2008R2(56),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKeyWindow7(57),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKeyWindow7x64(58),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ProductKey(59),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
FreeSpaceLow(60),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
FreeSpaceCriticalLowInGB(61),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
MacPoolRanges(62),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
HasCluster(63),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("1.0.0.0")
VdcVersion(64),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
SSLEnabled(65),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("DEFAULT")
CipherSuite(66),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
MinVmDiskSize(67),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("250")
MaxVmDiskSize(68),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("40")
StoragePoolNameSizeLimit(69),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("50")
StorageDomainNameSizeLimit(70),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
ImageCheckFailureMessageTimout(71),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
SlowStorageResponseMessageTimout(72),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
NumberOfFailedRunsOnVds(73),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
TimeToReduceFailedRunOnVdsInMinutes(74),
/**
* In default rerun Vm on all Available desktops
*/
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
MaxRerunVmOnVdsCount(75),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrepXPPath(79),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrep2K3Path(80),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrep2K8Path(81),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrep2K8x64Path(82),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrep2K8R2Path(83),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrepWindows7Path(84),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrepWindows7x64Path(85),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("7200")
AutoPostbackDelay(86),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1000")
MaxVmsInPool(87),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
VmPoolLeaseDays(88),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("12:00")
VmPoolLeaseStartTime(89),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("12:00")
VmPoolLeaseEndTime(90),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("LDAP")
AuthenticationMethod(92),
@Reloadable
@TypeConverterAttribute(java.util.List.class)
@DefaultValueAttribute("1,2,4")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedStringArray)
ValidNumOfMonitors(93),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("16")
MaxNumOfVmCpus(94),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("16")
MaxNumOfVmSockets(95),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("16")
MaxNumOfCpuPerSocket(96),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1")
NumberVmRefreshesBeforeSave(97),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
AutoRepoDomainRefreshTime(99),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("certs/ca.pem")
CACertificatePath(100),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ca")
CABaseDirectory(101),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("certs/engine.cer")
CertificateFileName(102),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
CertificatePassword(103),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
InstallVds(108),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
EnableUSBAsDefault(110),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("300")
SSHInactivityTimoutSeconds(111),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("120")
ServerRebootTimeout(112),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("40")
VmGracefulShutdownTimeout(113),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100")
VmPriorityMaxValue(114),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("Shutting Down")
VmGracefulShutdownMessage(115),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
SearchesRefreshRateInSeconds(116),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("smain,sinputs,scursor,splayback,srecord,sdisplay,ssmartcard,susbredir")
SpiceSecureChannels(117),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("shift+f12")
SpiceReleaseCursorKeys(119),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("shift+f11")
SpiceToggleFullScreenKeys(120),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
SpiceUsbAutoShare(121),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
UseSecureConnectionWithServers(123),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("500")
MaxResultsPageSize(124),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("oVirt")
OrganizationName(125),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
IsMultilevelAdministrationOn(127),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
VdsRecoveryTimeoutInMintues(128),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("8192")
MaxBlockDiskSize(129),
// the order is- {level}:{name}:{flags}:{vdsm};
// {level}:{name}:{flags}:{vdsm};1:cpu_name:cpu_flags,..,:vdsm_exec,+..,-..;..
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("1:pentium3:vmx:pentium3;2:intel-qemu64-nx:vmx,sse2:qemu64,-nx,+sse2;3:intel-qemu64:vmx,sse2,nx:qemu64,+sse2;2:amd-qemu64-nx:svm,sse2:qemu64,-nx,+sse2;3:amd-qemu64:svm,sse2,nx:qemu64,+sse2")
ServerCPUList(130),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
UseVdsBrokerInProc(131),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("RHEV-Agent")
AgentAppName(132),
@Reloadable
@TypeConverterAttribute(Map.class)
@DefaultValueAttribute("{\"windows\":\"RHEV-Spice\",\"linux\":\"xorg-x11-drv-qxl\"}")
SpiceDriverNameInGuest(133),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("RHEV-toolsSetup_")
GuestToolsSetupIsoPrefix(134),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
VcpuConsumptionPercentage(135),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("OvfMetaFiles\\")
OvfDirectoryPath(136),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/images/export/")
ExportVmDefaultPath(137),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/images/import/")
ImportDefaultPath(138),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("None")
VdsSelectionAlgorithm(139),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
EnableVdsLoadBalancing(140),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1")
VdsLoadBalancingeIntervalInMinutes(141),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("0")
LowUtilizationForEvenlyDistribute(142),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("6")
LowUtilizationForPowerSave(143),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
HighUtilizationForEvenlyDistribute(144),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
HighUtilizationForPowerSave(145),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("80")
UtilizationThresholdInPercent(146),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("2")
CpuOverCommitDurationMinutes(147),
// a default of 120% memory over commit.
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("120")
MaxVdsMemOverCommit(148),
// a default of 120% memory over commit.
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("120")
MaxVdsMemOverCommitForServers(149),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1")
PowerClientMaxNumberOfConcurrentVMs(151),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientAutoMigrateToPowerClientOnConnect(153),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientAutoMigrateFromPowerClientToVdsWhenConnectingFromRegularClient(154),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientAutoMigrateCheckOnRDP(155),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
PowerClientSpiceDynamicCompressionManagement(156),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
PowerClientAutoAdjustMemory(157),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
PowerClientAutoAdjustMemoryBaseOnAvailableMemory(158),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
PowerClientAutoAdjustMemoryLog(159),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("256")
PowerClientAutoAdjustMemoryGeneralReserve(160),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100")
PowerClientAutoAdjustMemorySpicePerSessionReserve(161),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("50")
PowerClientAutoAdjustMemorySpicePerMonitorReserve(162),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3712")
PowerClientAutoAdjustMemoryMaxMemory(163),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("64")
PowerClientAutoAdjustMemoryModulus(164),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
PowerClientAutoInstallCertificateOnApprove(165),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientAllowRunningGuestsWithoutTools(166),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientLogDetection(167),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
DebugTimerLogging(169),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
DebugSearchLogging(170),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
PowerClientAutoApprovePatterns(171),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("99408929-82CF-4DC7-A532-9D998063FA95")
PowerClientAutoRegistrationDefaultVdsGroupID(172),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientRunVmShouldVerifyPendingVMsAsWell(173),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PowerClientDedicatedVmLaunchOnVdsWhilePowerClientStarts(174),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
PowerClientUserPortalVdcManagedSpiceState(178),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
StoragePoolRefreshTimeInSeconds(179),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
StoragePoolNonOperationalResetTimeoutInMin(180),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1")
StorageDomainFalureTimeoutInMinutes(181),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ComputerADPaths(182),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ENMailHost(183),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("-1")
ENMailPort(184),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ENMailUser(185),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
ENMailPassword(186),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
ENMailEnableSsl(189),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("System,Sparse,COW,true;Data,Preallocated,RAW,false;Shared,Preallocated,RAW,false;Swap,Preallocated,RAW,false;Temp,Sparse,COW,false")
DiskConfigurationList(191),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
SPMFailOverAttempts(192),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ON")
LockPolicy(193),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
LockRenewalIntervalSec(194),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
LeaseTimeSec(195),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
IoOpTimeoutSec(196),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
LeaseRetries(197),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("en-us")
VncKeyboardLayout(203),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
SpmCommandFailOverRetries(204),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1")
SpmVCpuConsumption(205),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
RedirectServletReportsPage(251),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
EnableSpiceRootCertificateValidation(206),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100000")
MaxMacsCountInPool(207),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
NumberOfVmsForTopSizeVms(208),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("256")
VMMinMemorySizeInMB(210),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("20480")
VM32BitMaxMemorySizeInMB(211),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("262144")
VM64BitMaxMemorySizeInMB(212),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("0")
BlockMigrationOnSwapUsagePercentage(213),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
EnableSwapCheck(214),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
SendSMPOnRunVm(215),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("pc")
EmulatedMachine(216),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute(" WHERE RowNum BETWEEN %1$s AND %2$s")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine,
realValue = "PagingSyntax")
DBPagingSyntax(217),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("Range")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine,
realValue = "PagingType")
DBPagingType(218),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("SELECT * FROM (SELECT *, ROW_NUMBER() OVER(%1$s) as RowNum FROM (%2$s)) as T1 ) as T2 %3$s")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine,
realValue = "SearchTemplate")
DBSearchTemplate(219),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute(" OFFSET {0} LIMIT {1}")
PostgresPagingSyntax(223), // used by behaviour DBPagingSyntax
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("Offset")
PostgresPagingType(224), // used by behaviour DBPagingType
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("SELECT * FROM ( {1}) as T1 {2}")
PostgresSearchTemplate(225), // used by behaviour DBSearchTemplate
@Reloadable
@TypeConverterAttribute(java.util.HashSet.class)
@DefaultValueAttribute("4.4,4.5")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedVersionArray)
SupportedVDSMVersions(227),
@TypeConverterAttribute(java.util.HashSet.class)
@DefaultValueAttribute("2.2,3.0")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedVersionArray)
SupportedClusterLevels(228),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ENGINE")
OvfVirtualSystemType(229),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
WaitForVdsInitInSec(230),
// JTODO - temporarily using values from 256 for Java specific options
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("keys/engine.p12")
keystoreUrl(256),
// TODO: handle password behavior
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("NoSoup4U")
// @OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
keystorePass(257),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute(".truststore")
TruststoreUrl(258),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("NoSoup4U")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
TruststorePass(259),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("(GMT) GMT Standard Time")
DefaultTimeZone(260),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
AsyncPollingCyclesBeforeRefreshSuspend(261),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("120")
AsyncPollingCyclesBeforeCallbackCleanup(262),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("389")
LDAPServerPort(263),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("SignReq.bat")
SignScriptName(264),
// PKCS#12 store contains only one key
// Alias is almost always "1"
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("1")
CertAlias(265),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
SANWipeAfterDelete(267),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/etc/ovirt-engine")
// NOTE(review): id 267 duplicates SANWipeAfterDelete(267) above; for a duplicated
// id the static mappings HashMap keeps only the constant declared last, so a
// lookup by 267 cannot return both. TODO: assign a unique id.
ConfigDir(267),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/usr/share/ovirt-engine")
DataDir(268),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
UserSessionTimeOutInterval(269),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/data/images/rhev")
// NOTE(review): id 290 is reused later by FenceStartStatusRetries(290); only the
// constant declared last for a given id survives in the static mappings HashMap.
// TODO: assign a unique id.
RhevhLocalFSPath(290),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("default:GSSAPI")
LDAPSecurityAuthentication(271),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
UserDefinedVMProperties(272),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
PredefinedVMProperties(273),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("250")
MaxNumberOfHostsInStoragePool(274),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("15")
MaxVmNameLengthWindows(276),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("64")
MaxVmNameLengthNonWindows(277),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("255")
MaxVdsNameLength(278),
@Reloadable
@TypeConverterAttribute(Double.class)
@DefaultValueAttribute("30")
MaxStorageVdsTimeoutCheckSec(279),
@Reloadable
@TypeConverterAttribute(Double.class)
@DefaultValueAttribute("5")
MaxStorageVdsDelayCheckSec(280),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("300")
DisableFenceAtStartupInSec(281),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
NicDHCPDelayGraceInMS(282),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
FindFenceProxyRetries(283),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
FindFenceProxyDelayBetweenRetriesInSec(284),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1024")
LogPhysicalMemoryThresholdInMB(285),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("auth-conf")
SASL_QOP(286),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("%JBOSS_HOME%\\standalone\\deployments\\engine.ear")
ENGINEEarLib(287),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1000")
LdapQueryPageSize(288),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100")
MaxLDAPQueryPartsNumber(289),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
FenceStartStatusRetries(290),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
FenceStartStatusDelayBetweenRetriesInSec(291),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
LDAPQueryTimeout(292),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("default,ich6")
DesktopAudioDeviceType(294),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("0")
SupportedStorageFormats(296),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ILIKE")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine,
realValue = "LikeSyntax")
DBLikeSyntax(298),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ILIKE")
PostgresLikeSyntax(300), // used by behaviour DBLikeSyntax
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.ValueDependent, dependentOn = ConfigValues.DBEngine,
realValue = "I18NPrefix")
DBI18NPrefix(301),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("N")
SQLI18NPrefix(302),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
PostgresI18NPrefix(303), // used by behaviour DBI18NPrefix
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60000")
UknownTaskPrePollingLapse(304),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
LdapServers(305),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/var/lock/ovirt-engine/.openssl.exclusivelock")
SignLockFile(306),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("3.0.0.0")
ProductRPMVersion(307),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10000")
MaxAuditLogMessageLength(308),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
SysPrepDefaultUser(309),
@Reloadable
@TypeConverterAttribute(String.class)
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
@DefaultValueAttribute("")
SysPrepDefaultPassword(310),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ilo3=ipmilan")
FenceAgentMapping(311),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ilo3:lanplus,power_wait=4")
FenceAgentDefaultParams(312),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("admin")
AdminUser(313),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("internal")
AdminDomain(314),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.Password)
AdminPassword(315),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
SignCertTimeoutInSeconds(316),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("600")
OtpExpirationInSeconds(317),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("20")
ConnectToServerTimeoutInSeconds(318),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
IPTablesConfig(319),
/**
* Lower threshold for disk space on host to be considered low, in MB.
*/
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1000")
VdsLocalDisksLowFreeSpace(321),
/**
* Lower threshold for disk space on host to be considered critically low (almost out of space), in MB.
*/
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("500")
VdsLocalDisksCriticallyLowFreeSpace(322),
/**
* The minimal size of the internal thread pool. Minimal number of threads in pool
*/
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("50")
DefaultMinThreadPoolSize(323),
/**
* The size of the internal thread pool
*/
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("500")
DefaultMaxThreadPoolSize(324),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1")
InitStorageSparseSizeInGB(326),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ovirtmgmt")
ManagementNetwork(328),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("5.8")
OvirtInitialSupportedIsoVersion(329),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("rhevh")
OvirtIsoPrefix(330),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("80")
QuotaThresholdVdsGroup(331),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("80")
QuotaThresholdStorage(332),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("20")
QuotaGraceVdsGroup(333),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("20")
QuotaGraceStorage(334),
/**
* The base URL for the documentation web-site
*/
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
DocsURL(335),
// This value indicates devices that although are given to us by VDSM
// are still treated as managed devices
// This should be a [device=<device> type=<type>[,]]* string
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ManagedDevicesWhiteList(336),
/**
* The origin type to be used for VM and VM template creation
*/
// NOTE(review): id 336 duplicates ManagedDevicesWhiteList(336) above; the static
// mappings HashMap keeps only the constant declared last for a given id, so a
// lookup by 336 cannot return both. TODO: assign a unique id.
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("OVIRT")
OriginType(336),
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
HotPlugEnabled(337),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
HotPlugUnsupportedOsList(338),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
AllowDuplicateMacAddresses(339),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
SetupNetworksPollingTimeout(340),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
JobCleanupRateInMinutes(341),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10")
SucceededJobCleanupTimeInMinutes(342),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("60")
FailedJobCleanupTimeInMinutes(343),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("100")
JobPageSize(344),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("50")
MaxCorrelationIdLength(345),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
// NOTE(review): id 344 duplicates JobPageSize(344) above; for a duplicated id the
// static mappings HashMap keeps only the constant declared last. TODO: assign a
// unique id.
VmPoolMonitorIntervalInMinutes(344),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
// NOTE(review): id 345 duplicates MaxCorrelationIdLength(345) above; for a
// duplicated id the static mappings HashMap keeps only the constant declared
// last. TODO: assign a unique id.
VmPoolMonitorBatchSize(345),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("0 0/5 * * * ?")
AutoRecoverySchedule(346),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
VmPoolMonitorMaxAttempts(347),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
LiveSnapshotEnabled(348),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("9000")
MaxMTU(349),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
PosixStorageEnabled(350),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
SendVmTicketUID(351),
@DefaultValueAttribute("")
@TypeConverterAttribute(String.class)
LDAPProviderTypes(352),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
AdvancedNFSOptionsEnabled(353),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("8192")
PayloadSize(354),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("255")
ApplicationMode(355),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("4")
NumberOfUSBSlots(356),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
NativeUSBEnabled(357),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
ShareableDiskEnabled(358),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
CpuPinningEnabled(359),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
DirectLUNDiskEnabled(360),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
FilteringLUNsEnabled(361),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("animation")
WANDisableEffects(362),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("16")
WANColorDepth(363),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("3")
VmPoolMaxSubsequentFailures(364),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
CpuPinMigrationEnabled(365),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
SupportForceCreateVG(366),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
NonVmNetworkSupported(367),
@TypeConverterAttribute(java.util.List.class)
@DefaultValueAttribute("0,2")
@OptionBehaviourAttribute(behaviour = OptionBehaviour.CommaSeparatedStringArray)
DisconnectPoolOnReconstruct(368),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("120")
NetworkConnectivityCheckTimeoutInSeconds(369),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
SupportBridgesReportByVDSM(370),
@Reloadable
@TypeConverterAttribute(Map.class)
@DefaultValueAttribute("{\"storage domains\":\"false\",\"hosts\":\"false\"}")
AutoRecoveryAllowedTypes(371),
/*
* umask is required to allow only self access
* tar is missing from vanilla fedora-18 so we use python
*/
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute(
"umask 0077; " +
"MYTMP=\"$(mktemp -t ovirt-XXXXXXXXXX)\"; " +
"trap \"chmod -R u+rwX \\\"${MYTMP}\\\" > /dev/null 2>&1; rm -fr \\\"${MYTMP}\\\" > /dev/null 2>&1\" 0; " +
"rm -fr \"${MYTMP}\" && " +
"mkdir \"${MYTMP}\" && " +
"python -c \"import sys, tarfile; tarfile.open(fileobj=sys.stdin, mode='r|').extractall(path='${MYTMP}')\" && " +
"@ENVIRONMENT@ \"${MYTMP}\"/setup DIALOG/dialect=str:machine DIALOG/customization=bool:True"
)
BootstrapCommand(373),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10000")
BootstrapCacheRefreshInterval(374),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("/usr/share/ovirt-host-deploy/interface-3")
BootstrapPackageDirectory(375),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ovirt-host-deploy.tar")
BootstrapPackageName(376),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("ovirt-engine")
SSHKeyAlias(377),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
LDAPOperationTimeout(378),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("30")
LDAPConnectTimeout(379),
/*
* Whether to allow a cluster with both Virt and Gluster services enabled
*/
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
AllowClusterWithVirtGlusterEnabled(380),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
EnableMACAntiSpoofingFilterRules(381),
// Gluster peer status command
// NOTE(review): id 378 duplicates LDAPOperationTimeout(378) above; in the static
// mappings HashMap the later declaration overwrites the earlier one. TODO:
// assign a unique id.
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("gluster peer status --xml")
GlusterPeerStatusCommand(378),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
MTUOverrideSupported(382),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("1800")
SSHInactivityHardTimoutSeconds(383),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("virt")
GlusterVolumeOptionGroupVirtValue(384),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("36")
GlusterVolumeOptionOwnerUserVirtValue(385),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("36")
GlusterVolumeOptionOwnerGroupVirtValue(386),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
IPTablesConfigForVirt(387),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
IPTablesConfigForGluster(388),
// Host time drift
@Reloadable
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
EnableHostTimeDrift(389),
@Reloadable
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("300")
HostTimeDriftInSec(390),
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("10000")
ThrottlerMaxWaitForVdsUpdateInMillis(391),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
OnlyRequiredNetworksMandatoryForVdsSelection(392),
@Reloadable
@TypeConverterAttribute(EngineWorkingMode.class)
@DefaultValueAttribute("Active")
EngineMode(393),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("false")
LiveStorageMigrationEnabled(394),
/**
* Refresh rate (in seconds) for light-weight gluster data i.e. data that can be fetched without much of an overhead
* on the GlusterFS processes
*/
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("5")
GlusterRefreshRateLight(395),
/**
* Refresh rate (in seconds) for heavy-weight gluster data i.e. commands to fetch such data adds a considerable
* overhead on the GlusterFS processes.
*/
@TypeConverterAttribute(Integer.class)
@DefaultValueAttribute("300")
GlusterRefreshRateHeavy(396),
@Reloadable
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
BootstrapMinimalVdsmVersion(397),
@TypeConverterAttribute(Boolean.class)
@DefaultValueAttribute("true")
SupportForceExtendVG(398),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
ENGINEEARLib(399),
@TypeConverterAttribute(String.class)
@DefaultValueAttribute("")
MinimalETLVersion(400),
Invalid(65535);
// Integer value persisted for this option; serves as the lookup key in forValue().
private int intValue;
// Reverse index from int value to enum constant, built once at class-load time.
private static Map<Integer, ConfigValues> mappings;
static {
mappings = new HashMap<Integer, ConfigValues>();
for (ConfigValues action : values()) {
// NOTE(review): HashMap.put replaces on duplicate keys, so constants that
// share an int value shadow each other here -- the later declaration wins.
mappings.put(action.getValue(), action);
}
}
// Binds the persisted integer value to this enum constant.
private ConfigValues(int value) {
    this.intValue = value;
}
/** @return the integer value persisted for this configuration option. */
public int getValue() {
    return this.intValue;
}
/**
 * Resolves the enum constant registered for the given persisted integer value.
 *
 * @param value the persisted integer value
 * @return the matching constant, or {@code null} when no constant carries it
 */
public static ConfigValues forValue(int value) {
    ConfigValues resolved = mappings.get(value);
    return resolved;
}
}
|
package com.couchbase.lite.replicator;
import com.couchbase.lite.CouchbaseLiteException;
import com.couchbase.lite.Database;
import com.couchbase.lite.Document;
import com.couchbase.lite.DocumentChange;
import com.couchbase.lite.Emitter;
import com.couchbase.lite.LiteTestCaseWithDB;
import com.couchbase.lite.LiveQuery;
import com.couchbase.lite.Manager;
import com.couchbase.lite.Mapper;
import com.couchbase.lite.Query;
import com.couchbase.lite.QueryEnumerator;
import com.couchbase.lite.QueryOptions;
import com.couchbase.lite.QueryRow;
import com.couchbase.lite.ReplicationFilter;
import com.couchbase.lite.Revision;
import com.couchbase.lite.SavedRevision;
import com.couchbase.lite.Status;
import com.couchbase.lite.TransactionalTask;
import com.couchbase.lite.UnsavedRevision;
import com.couchbase.lite.ValidationContext;
import com.couchbase.lite.Validator;
import com.couchbase.lite.View;
import com.couchbase.lite.auth.Authenticator;
import com.couchbase.lite.auth.AuthenticatorFactory;
import com.couchbase.lite.auth.BasicAuthenticator;
import com.couchbase.lite.auth.FacebookAuthorizer;
import com.couchbase.lite.internal.RevisionInternal;
import com.couchbase.lite.mockserver.MockBulkDocs;
import com.couchbase.lite.mockserver.MockChangesFeed;
import com.couchbase.lite.mockserver.MockChangesFeedNoResponse;
import com.couchbase.lite.mockserver.MockCheckpointGet;
import com.couchbase.lite.mockserver.MockCheckpointPut;
import com.couchbase.lite.mockserver.MockDispatcher;
import com.couchbase.lite.mockserver.MockDocumentBulkGet;
import com.couchbase.lite.mockserver.MockDocumentGet;
import com.couchbase.lite.mockserver.MockDocumentPut;
import com.couchbase.lite.mockserver.MockFacebookAuthPost;
import com.couchbase.lite.mockserver.MockHelper;
import com.couchbase.lite.mockserver.MockRevsDiff;
import com.couchbase.lite.mockserver.MockSessionGet;
import com.couchbase.lite.mockserver.SmartMockResponseImpl;
import com.couchbase.lite.mockserver.WrappedSmartMockResponse;
import com.couchbase.lite.support.Base64;
import com.couchbase.lite.support.CouchbaseLiteHttpClientFactory;
import com.couchbase.lite.support.HttpClientFactory;
import com.couchbase.lite.support.MultipartReader;
import com.couchbase.lite.support.MultipartReaderDelegate;
import com.couchbase.lite.support.RemoteRequestRetry;
import com.couchbase.lite.util.Log;
import com.couchbase.lite.util.Utils;
import com.couchbase.org.apache.http.entity.mime.MultipartEntity;
import com.squareup.okhttp.mockwebserver.MockResponse;
import com.squareup.okhttp.mockwebserver.MockWebServer;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import junit.framework.Assert;
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.client.CookieStore;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.cookie.Cookie;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLDecoder;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Tests for the new state machine based replicator
*/
public class ReplicationTest extends LiteTestCaseWithDB {
/**
 * TestCase(CreateReplicators) in ReplicationAPITests.m
 */
public void testCreateReplicators() throws Exception {
    URL fakeRemoteURL = new URL("http://fake.fake/fakedb");

    // No replications exist on a fresh database.
    assertEquals(0, database.getAllReplications().size());

    // A freshly created push replication...
    Replication push = database.createPushReplication(fakeRemoteURL);
    assertNotNull(push);

    // ...points at the expected endpoints and carries default settings.
    assertEquals(database, push.getLocalDatabase());
    assertEquals(fakeRemoteURL, push.getRemoteUrl());
    assertFalse(push.isPull());
    assertFalse(push.isContinuous());
    assertFalse(push.shouldCreateTarget());
    assertNull(push.getFilter());
    assertNull(push.getFilterParams());
    assertNull(push.getDocIds());
    assertEquals(0, push.getHeaders().size());

    // It has not started running yet.
    assertFalse(push.isRunning());
    assertEquals(Replication.ReplicationStatus.REPLICATION_STOPPED, push.getStatus());
    assertEquals(0, push.getChangesCount());
    assertEquals(0, push.getCompletedChangesCount());
    assertNull(push.getLastError());

    // A pull replication is a distinct object with pull semantics.
    Replication pull = database.createPullReplication(fakeRemoteURL);
    assertNotNull(pull);
    assertTrue(push != pull);
    assertEquals(database, pull.getLocalDatabase());
    assertEquals(fakeRemoteURL, pull.getRemoteUrl());
    assertTrue(pull.isPull());

    // A doc-id filtered pull round-trips its doc ids through the manager.
    Replication filteredPull = database.createPullReplication(fakeRemoteURL);
    assertNotNull(filteredPull);
    assertTrue(filteredPull != pull);
    filteredPull.setDocIds(Arrays.asList("doc1", "doc2"));
    Replication resolved = database.getManager().getReplicator(filteredPull.getProperties());
    assertEquals(filteredPull.getDocIds(), resolved.getDocIds());
}
/**
 * Continuous puller starts offline
 * Wait for a while .. (til what?)
 * Add remote document (simulate w/ mock webserver)
 * Put replication online
 * Make sure doc is pulled
 */
public void testGoOnlinePuller() throws Exception {
Log.d(Log.TAG, "testGoOnlinePuller");
// create mock server
MockWebServer server = new MockWebServer();
try {
MockDispatcher dispatcher = new MockDispatcher();
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
server.setDispatcher(dispatcher);
server.play();
// mock documents to be pulled
MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
// checkpoint PUT or GET response (sticky)
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// _changes response 503 error (sticky) -- keeps the puller effectively offline
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, wrapped2);
// doc1 response
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// _revs_diff response -- everything missing
MockRevsDiff mockRevsDiff = new MockRevsDiff();
mockRevsDiff.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
// _bulk_docs response -- everything stored
MockBulkDocs mockBulkDocs = new MockBulkDocs();
mockBulkDocs.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
// create and start a continuous pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
pullReplication.start();
Log.d(Log.TAG, "Started pullReplication: %s", pullReplication);
// wait until a _checkpoint request has been sent
dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
// wait until a _changes request has been sent
dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
putReplicationOffline(pullReplication);
// clear out existing queued mock responses to make room for new ones
dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_CHANGES);
// real _changes response with doc1
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// long poll changes feed no response -- emulates a quiet server after doc1
MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
mockChangesFeedNoResponse.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse);
putReplicationOnline(pullReplication);
Log.d(Log.TAG, "Waiting for PUT checkpoint request with seq: %d", mockDoc1.getDocSeq());
waitForPutCheckpointRequestWithSeq(dispatcher, mockDoc1.getDocSeq());
Log.d(Log.TAG, "Got PUT checkpoint request with seq: %d", mockDoc1.getDocSeq());
stopReplication(pullReplication);
} finally {
server.shutdown();
}
}
/**
 * Start continuous replication with a closed db.
 * <p/>
 * Expected behavior:
 * - Receive replication finished callback
 * - Replication lastError will contain an exception
 */
public void testStartReplicationClosedDb() throws Exception {
    Database db = this.manager.getDatabase("closed");
    final CountDownLatch countDownLatch = new CountDownLatch(1);
    final Replication replication = db.createPullReplication(new URL("http://fake.com/foo"));
    replication.setContinuous(true);
    replication.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            Log.d(TAG, "changed event: %s", event);
            // Release the latch once the replicator reports it is no longer running.
            if (!replication.isRunning()) {
                countDownLatch.countDown();
            }
        }
    });
    db.close();
    replication.start();

    // The replicator should stop on its own (db is closed) and surface an error.
    boolean success = countDownLatch.await(60, TimeUnit.SECONDS);
    assertTrue(success);
    assertTrue(replication.getLastError() != null);
}
/**
 * Start a replication and stop it immediately
 */
public void failingTestStartReplicationStartStop() throws Exception {
final CountDownLatch countDownLatch = new CountDownLatch(1);
final List<ReplicationStateTransition> transitions = new ArrayList<ReplicationStateTransition>();
final Replication replication = database.createPullReplication(new URL("http://fake.com/foo"));
replication.setContinuous(true);
replication.addChangeListener(new ReplicationFinishedObserver(countDownLatch));
replication.start();
replication.start(); // this should be ignored
replication.stop();
replication.stop(); // this should be ignored
boolean success = countDownLatch.await(60, TimeUnit.SECONDS);
assertTrue(success);
assertTrue(replication.getLastError() == null);
// NOTE(review): `transitions` is declared above but never populated -- the only
// listener registered is a ReplicationFinishedObserver, which does not record
// transitions. The assertions below therefore cannot pass, presumably why the
// test name is prefixed "failing". Confirm whether a transition-recording
// listener was intended here.
assertEquals(3, transitions.size());
assertEquals(ReplicationState.INITIAL, transitions.get(0).getSource());
assertEquals(ReplicationState.RUNNING, transitions.get(0).getDestination());
assertEquals(ReplicationState.RUNNING, transitions.get(1).getSource());
assertEquals(ReplicationState.STOPPING, transitions.get(1).getDestination());
assertEquals(ReplicationState.STOPPING, transitions.get(2).getSource());
assertEquals(ReplicationState.STOPPED, transitions.get(2).getDestination());
}
/**
 * Pull replication test:
 * <p/>
 * - Single one-shot pull replication
 * - Against simulated sync gateway
 * - Remote docs do not have attachments
 */
public void testMockSinglePullSyncGw() throws Exception {
    // shutdown server when done = true; no attachments on remote docs.
    mockSinglePull(true, MockDispatcher.ServerType.SYNC_GW, false);
}
/**
 * Pull replication test:
 * <p/>
 * - Single one-shot pull replication
 * - Against simulated couchdb
 * - Remote docs do not have attachments
 */
public void testMockSinglePullCouchDb() throws Exception {
    // shutdown server when done = true; no attachments on remote docs.
    mockSinglePull(true, MockDispatcher.ServerType.COUCHDB, false);
}
/**
 * Pull replication test:
 * <p/>
 * - Single one-shot pull replication
 * - Against simulated couchdb
 * - Remote docs have attachments
 */
public void testMockSinglePullCouchDbAttachments() throws Exception {
    // shutdown server when done = true; remote docs include attachments.
    mockSinglePull(true, MockDispatcher.ServerType.COUCHDB, true);
}
/**
 * Pull replication test:
 * <p/>
 * - Single one-shot pull replication
 * - Against simulated sync gateway
 * - Remote docs have attachments
 * <p/>
 * TODO: sporadic assertion failure when checking rev field of PUT checkpoint requests
 */
public void testMockSinglePullSyncGwAttachments() throws Exception {
    // shutdown server when done = true; remote docs include attachments.
    mockSinglePull(true, MockDispatcher.ServerType.SYNC_GW, true);
}
/** Two sequential pulls against a mock Sync Gateway; server shut down afterwards. */
public void testMockMultiplePullSyncGw() throws Exception {
    mockMultiplePull(true, MockDispatcher.ServerType.SYNC_GW);
}
/** Two sequential pulls against a mock CouchDB; server shut down afterwards. */
public void testMockMultiplePullCouchDb() throws Exception {
    mockMultiplePull(true, MockDispatcher.ServerType.COUCHDB);
}
/** Continuous pull against a mock CouchDB; server shut down afterwards. */
public void testMockContinuousPullCouchDb() throws Exception {
    // TODO: (IMPORTANT, FORESTDB) lastSequence for checkpoint does not match and causes a deadlock
    // if(!isSQLiteDB())
    //     fail("FORESTDB causes deadlock because of lastSequence mismatch for checkpoint");
    mockContinuousPull(true, MockDispatcher.ServerType.COUCHDB);
}
/**
 * Do a pull replication
 *
 * @param shutdownMockWebserver - should this test shutdown the mockwebserver
 *                              when done? if another test wants to pick up
 *                              where this left off, you should pass false.
 * @param serverType            - should the mock return the Sync Gateway server type in
 *                              the "Server" HTTP Header? this changes the behavior of the
 *                              replicator to use bulk_get and POST requests for _changes feeds.
 * @param addAttachments        - should the mock sync gateway return docs with attachments?
 * @return a map that contains the mockwebserver (key="server") and the mock dispatcher
 * (key="dispatcher")
 */
public Map<String, Object> mockSinglePull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType, boolean addAttachments) throws Exception {
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
try {
dispatcher.setServerType(serverType);
// mock documents to be pulled
MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
mockDoc1.setAttachmentName("attachment.png");
MockDocumentGet.MockDocument mockDoc2 = new MockDocumentGet.MockDocument("doc2", "1-563b", 2);
mockDoc2.setJsonMap(MockHelper.generateRandomJsonMap());
mockDoc2.setAttachmentName("attachment2.png");
// checkpoint GET response w/ 404 -- simulates a first-time replication
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// _changes response listing both docs
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc2));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// doc1 response
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
if (addAttachments) {
mockDocumentGet.addAttachmentFilename(mockDoc1.getAttachmentName());
}
dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// doc2 response
mockDocumentGet = new MockDocumentGet(mockDoc2);
if (addAttachments) {
mockDocumentGet.addAttachmentFilename(mockDoc2.getAttachmentName());
}
dispatcher.enqueueResponse(mockDoc2.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// _bulk_get response (used when the server identifies as Sync Gateway)
MockDocumentBulkGet mockBulkGet = new MockDocumentBulkGet();
mockBulkGet.addDocument(mockDoc1);
mockBulkGet.addDocument(mockDoc2);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_GET, mockBulkGet);
// respond to all PUT Checkpoint requests
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
mockCheckpointPut.setDelayMs(500);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// start mock server
server.play();
// run pull replication with a custom header attached
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
Map<String, Object> headers = new HashMap<String, Object>();
headers.put("foo", "bar");
pullReplication.setHeaders(headers);
String checkpointId = pullReplication.remoteCheckpointDocID();
runReplication(pullReplication);
Log.d(TAG, "pullReplication finished");
// NOTE(review): this listener is registered after runReplication() has already
// completed, so it only logs changes from any later activity -- confirm intent.
database.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
List<DocumentChange> changes = event.getChanges();
for (DocumentChange documentChange : changes) {
Log.d(TAG, "doc change callback: %s", documentChange.getDocumentId());
}
}
});
// assert that we now have both docs in local db
assertNotNull(database);
Document doc1 = database.getDocument(mockDoc1.getDocId());
assertNotNull(doc1);
assertNotNull(doc1.getCurrentRevisionId());
assertTrue(doc1.getCurrentRevisionId().equals(mockDoc1.getDocRev()));
assertNotNull(doc1.getProperties());
assertEquals(mockDoc1.getJsonMap(), doc1.getUserProperties());
Document doc2 = database.getDocument(mockDoc2.getDocId());
assertNotNull(doc2);
assertNotNull(doc2.getCurrentRevisionId());
assertNotNull(doc2.getProperties());
assertTrue(doc2.getCurrentRevisionId().equals(mockDoc2.getDocRev()));
assertEquals(mockDoc2.getJsonMap(), doc2.getUserProperties());
// assert that docs have attachments (if applicable)
if (addAttachments) {
attachmentAsserts(mockDoc1.getAttachmentName(), doc1);
attachmentAsserts(mockDoc2.getAttachmentName(), doc2);
}
// make assertions about outgoing requests from replicator -> mock
RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT);
assertNotNull(getCheckpointRequest);
assertEquals("bar", getCheckpointRequest.getHeader("foo"));
assertTrue(getCheckpointRequest.getMethod().equals("GET"));
assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT));
RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
if (serverType == MockDispatcher.ServerType.SYNC_GW) {
assertTrue(getChangesFeedRequest.getMethod().equals("POST"));
} else {
assertTrue(getChangesFeedRequest.getMethod().equals("GET"));
}
assertTrue(getChangesFeedRequest.getPath().matches(MockHelper.PATH_REGEX_CHANGES));
// wait until the mock webserver receives a PUT checkpoint request with doc #2's sequence
Log.d(TAG, "waiting for PUT checkpoint %s", mockDoc2.getDocSeq());
List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, mockDoc2.getDocSeq());
validateCheckpointRequestsRevisions(checkpointRequests);
Log.d(TAG, "got PUT checkpoint %s", mockDoc2.getDocSeq());
// assert our local sequence matches what is expected
String lastSequence = database.lastSequenceWithCheckpointId(checkpointId);
assertEquals(Integer.toString(mockDoc2.getDocSeq()), lastSequence);
// assert completed count makes sense
assertEquals(pullReplication.getChangesCount(), pullReplication.getCompletedChangesCount());
// allow for either a single _bulk_get request or individual doc requests.
// if the server is sync gateway, it is allowable for replicator to use _bulk_get
RecordedRequest request = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_GET);
if (request != null) {
String body = MockHelper.getUtf8Body(request);
assertTrue(body.contains(mockDoc1.getDocId()));
assertTrue(body.contains(mockDoc2.getDocId()));
} else {
RecordedRequest doc1Request = dispatcher.takeRequest(mockDoc1.getDocPathRegex());
assertTrue(doc1Request.getMethod().equals("GET"));
assertTrue(doc1Request.getPath().matches(mockDoc1.getDocPathRegex()));
RecordedRequest doc2Request = dispatcher.takeRequest(mockDoc2.getDocPathRegex());
assertTrue(doc2Request.getMethod().equals("GET"));
assertTrue(doc2Request.getPath().matches(mockDoc2.getDocPathRegex()));
}
} finally {
// Shut down the server. Instances cannot be reused.
if (shutdownMockWebserver) {
server.shutdown();
}
}
Map<String, Object> returnVal = new HashMap<String, Object>();
returnVal.put("server", server);
returnVal.put("dispatcher", dispatcher);
return returnVal;
}
/**
 * Simulate the following:
 * <p/>
 * - Add a few docs and do a pull replication
 * - One doc on sync gateway is now updated
 * - Do a second pull replication
 * - Assert we get the updated doc and save it locally
 *
 * @param shutdownMockWebserver whether to shut down the mock webserver when done;
 *                              pass false if a follow-up test wants to reuse it
 * @param serverType            mock server flavor (Sync Gateway vs CouchDB)
 * @return map containing the mockwebserver (key="server") and dispatcher (key="dispatcher")
 */
public Map<String, Object> mockMultiplePull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception {
    String doc1Id = "doc1";

    // create mockwebserver and custom dispatcher
    boolean addAttachments = false;

    // do a pull replication
    Map<String, Object> serverAndDispatcher = mockSinglePull(false, serverType, addAttachments);

    MockWebServer server = (MockWebServer) serverAndDispatcher.get("server");
    MockDispatcher dispatcher = (MockDispatcher) serverAndDispatcher.get("dispatcher");
    try {
        // clear out any possible residue left from previous test, eg, mock responses queued up as
        // any recorded requests that have been logged.
        dispatcher.reset();

        String doc1Rev = "2-2e38";
        int doc1Seq = 3;
        String checkpointRev = "0-1";
        String checkpointLastSequence = "2";

        // checkpoint GET response w/ seq = 2 (simulates the first pull having completed)
        MockCheckpointGet mockCheckpointGet = new MockCheckpointGet();
        mockCheckpointGet.setOk("true");
        mockCheckpointGet.setRev(checkpointRev);
        mockCheckpointGet.setLastSequence(checkpointLastSequence);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet);

        // _changes response advertising the updated doc1 revision
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        MockChangesFeed.MockChangedDoc mockChangedDoc1 = new MockChangesFeed.MockChangedDoc()
                .setSeq(doc1Seq)
                .setDocId(doc1Id)
                .setChangedRevIds(Arrays.asList(doc1Rev));
        mockChangesFeed.add(mockChangedDoc1);
        MockResponse fakeChangesResponse = mockChangesFeed.generateMockResponse();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, fakeChangesResponse);

        // doc1 response
        Map<String, Object> doc1JsonMap = MockHelper.generateRandomJsonMap();
        MockDocumentGet mockDocumentGet = new MockDocumentGet()
                .setDocId(doc1Id)
                .setRev(doc1Rev)
                .setJsonMap(doc1JsonMap);
        String doc1PathRegex = "/db/doc1.*";
        dispatcher.enqueueResponse(doc1PathRegex, mockDocumentGet.generateMockResponse());

        // checkpoint PUT response (sticky, so every checkpoint PUT is answered)
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        // BUGFIX: previously this called mockCheckpointGet.setSticky(true) -- making the
        // already-enqueued GET response sticky and leaving the new PUT responder
        // single-shot. Every sibling mock setup in this class marks the PUT responder
        // sticky, which is clearly what was intended here.
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);

        // run pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        runReplication(pullReplication);

        // assert that we now have both docs in local db
        assertNotNull(database);
        Document doc1 = database.getDocument(doc1Id);
        assertNotNull(doc1);
        assertNotNull(doc1.getCurrentRevisionId());
        assertTrue(doc1.getCurrentRevisionId().startsWith("2-"));
        assertEquals(doc1JsonMap, doc1.getUserProperties());

        // make assertions about outgoing requests from replicator -> mock
        RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT);
        assertNotNull(getCheckpointRequest);
        assertTrue(getCheckpointRequest.getMethod().equals("GET"));
        assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT));
        RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
        if (serverType == MockDispatcher.ServerType.SYNC_GW) {
            assertTrue(getChangesFeedRequest.getMethod().equals("POST"));
        } else {
            assertTrue(getChangesFeedRequest.getMethod().equals("GET"));
        }
        assertTrue(getChangesFeedRequest.getPath().matches(MockHelper.PATH_REGEX_CHANGES));
        if (serverType == MockDispatcher.ServerType.SYNC_GW) {
            // Sync Gateway _changes is a POST whose JSON body carries "since".
            Map<String, Object> jsonMap = Manager.getObjectMapper().readValue(getChangesFeedRequest.getUtf8Body(), Map.class);
            assertTrue(jsonMap.containsKey("since"));
            Integer since = (Integer) jsonMap.get("since");
            assertEquals(2, since.intValue());
        }
        RecordedRequest doc1Request = dispatcher.takeRequest(doc1PathRegex);
        assertTrue(doc1Request.getMethod().equals("GET"));
        assertTrue(doc1Request.getPath().matches("/db/doc1\\?rev=2-2e38.*"));

        // wait until the mock webserver receives a PUT checkpoint request with doc #2's sequence
        int expectedLastSequence = doc1Seq;
        List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, expectedLastSequence);
        assertEquals(1, checkpointRequests.size());

        // assert our local sequence matches what is expected
        String lastSequence = database.lastSequenceWithCheckpointId(pullReplication.remoteCheckpointDocID());
        assertEquals(Integer.toString(expectedLastSequence), lastSequence);

        // assert completed count makes sense
        assertEquals(pullReplication.getChangesCount(), pullReplication.getCompletedChangesCount());
    } finally {
        if (shutdownMockWebserver) {
            server.shutdown();
        }
    }

    Map<String, Object> returnVal = new HashMap<String, Object>();
    returnVal.put("server", server);
    returnVal.put("dispatcher", dispatcher);
    return returnVal;
}
/**
 * Runs a continuous pull against a preloaded mock CouchDB until all remote docs
 * have arrived locally, then waits for idle and shuts the replication down.
 *
 * @param shutdownMockWebserver whether to shut down the mock webserver when done
 * @param serverType            must be COUCHDB (asserted below)
 * @return map containing the mockwebserver (key="server") and dispatcher (key="dispatcher")
 */
public Map<String, Object> mockContinuousPull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception {
assertTrue(serverType == MockDispatcher.ServerType.COUCHDB);
final int numMockRemoteDocs = 20; // must be multiple of 10!
final AtomicInteger numDocsPulledLocally = new AtomicInteger(0);
MockDispatcher dispatcher = new MockDispatcher();
dispatcher.setServerType(serverType);
// spread the docs over 10 _changes responses
int numDocsPerChangesResponse = numMockRemoteDocs / 10;
MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, numMockRemoteDocs, numDocsPerChangesResponse);
try {
server.play();
final CountDownLatch receivedAllDocs = new CountDownLatch(1);
// run pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
final CountDownLatch replicationDoneSignal = new CountDownLatch(1);
pullReplication.addChangeListener(new ReplicationFinishedObserver(replicationDoneSignal));
final CountDownLatch replicationIdleSignal = new CountDownLatch(1);
ReplicationIdleObserver idleObserver = new ReplicationIdleObserver(replicationIdleSignal);
pullReplication.addChangeListener(idleObserver);
// count incoming doc changes and release the latch once all have arrived
database.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
List<DocumentChange> changes = event.getChanges();
for (DocumentChange change : changes) {
numDocsPulledLocally.addAndGet(1);
}
if (numDocsPulledLocally.get() == numMockRemoteDocs) {
receivedAllDocs.countDown();
}
}
});
pullReplication.start();
// wait until we received all mock docs or timeout occurs
boolean success = receivedAllDocs.await(60, TimeUnit.SECONDS);
assertTrue(success);
// make sure all docs in local db
Map<String, Object> allDocs = database.getAllDocs(new QueryOptions());
Integer totalRows = (Integer) allDocs.get("total_rows");
List rows = (List) allDocs.get("rows");
assertEquals(numMockRemoteDocs, totalRows.intValue());
assertEquals(numMockRemoteDocs, rows.size());
// wait until idle
success = replicationIdleSignal.await(30, TimeUnit.SECONDS);
assertTrue(success);
// cleanup / shutdown
pullReplication.stop();
success = replicationDoneSignal.await(30, TimeUnit.SECONDS);
assertTrue(success);
long lastSeq = database.getLastSequenceNumber();
Log.e(TAG, "lastSequence = %d", lastSeq);
// wait until the mock webserver receives a PUT checkpoint request with the last doc's sequence;
// this avoids ugly and confusing exceptions in the logs.
List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, numMockRemoteDocs - 1);
validateCheckpointRequestsRevisions(checkpointRequests);
} finally {
if (shutdownMockWebserver) {
server.shutdown();
}
}
Map<String, Object> returnVal = new HashMap<String, Object>();
returnVal.put("server", server);
returnVal.put("dispatcher", dispatcher);
return returnVal;
}
/**
 * Pulls a new revision of a locally-created attachment-bearing document and
 * verifies the attachment metadata survives on the pulled revision.
 * NOTE(review): despite the name, the final assertion checks that attachments
 * are still PRESENT after the pull (the mock doc GET excludes the attachment
 * body via setIncludeAttachmentPart(false)) -- confirm the intended scenario.
 */
public void testAttachmentsDeletedOnPull() throws Exception {
String doc1Id = "doc1";
int doc1Rev2Generation = 2;
String doc1Rev2Digest = "b000";
String doc1Rev2 = String.format("%d-%s", doc1Rev2Generation, doc1Rev2Digest);
int doc1Seq1 = 1;
String doc1AttachName = "attachment.png";
String contentType = "image/png";
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
try {
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
server.play();
// add some documents - verify it has an attachment
Document doc1 = createDocumentForPushReplication(doc1Id, doc1AttachName, contentType);
String doc1Rev1 = doc1.getCurrentRevisionId();
doc1 = database.getDocument(doc1.getId());
assertTrue(doc1.getCurrentRevision().getAttachments().size() > 0);
// checkpoint GET response w/ 404
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// checkpoint PUT response
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// add response to 1st _changes request: advertise a remote rev-2 of doc1
final MockDocumentGet.MockDocument mockDocument1 = new MockDocumentGet.MockDocument(
doc1Id, doc1Rev2, doc1Seq1);
Map<String, Object> newProperties = new HashMap<String, Object>(doc1.getProperties());
newProperties.put("_rev", doc1Rev2);
mockDocument1.setJsonMap(newProperties);
mockDocument1.setAttachmentName(doc1AttachName);
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// add sticky _changes response to feed=longpoll that just blocks for 60 seconds to emulate
// server that doesn't have any new changes
MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
mockChangesFeedNoResponse.setDelayMs(60 * 1000);
mockChangesFeedNoResponse.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse);
// add response to doc get: rev-2 with attachment metadata but no attachment body part
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDocument1);
mockDocumentGet.addAttachmentFilename(mockDocument1.getAttachmentName());
mockDocumentGet.setIncludeAttachmentPart(false);
// revision history linking remote rev-2 back to the local rev-1
Map<String, Object> revHistory = new HashMap<String, Object>();
revHistory.put("start", doc1Rev2Generation);
List ids = Arrays.asList(
RevisionInternal.digestFromRevID(doc1Rev2),
RevisionInternal.digestFromRevID(doc1Rev1)
);
revHistory.put("ids", ids);
mockDocumentGet.setRevHistoryMap(revHistory);
dispatcher.enqueueResponse(mockDocument1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// create and start pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
pullReplication.start();
// wait for the next PUT checkpoint request/response
waitForPutCheckpointRequestWithSeq(dispatcher, 1);
stopReplication(pullReplication);
// make sure doc has attachments
Document doc1Fetched = database.getDocument(doc1.getId());
assertTrue(doc1Fetched.getCurrentRevision().getAttachments().size() > 0);
} finally {
server.shutdown();
}
}
/**
* This is essentially a regression test for a deadlock
* that was happening when the LiveQuery#onDatabaseChanged()
* was calling waitForUpdateThread(), but that thread was
* waiting on connection to be released by the thread calling
* waitForUpdateThread(). When the deadlock bug was present,
* this test would trigger the deadlock and never finish.
* <p/>
* TODO: sporadic assertion failure when checking rev field of PUT checkpoint requests
*/
public void testPullerWithLiveQuery() throws Throwable {
View view = database.getView("testPullerWithLiveQueryView");
view.setMapReduce(new Mapper() {
@Override
public void map(Map<String, Object> document, Emitter emitter) {
if (document.get("_id") != null) {
emitter.emit(document.get("_id"), null);
}
}
}, null, "1");
final CountDownLatch countDownLatch = new CountDownLatch(1);
LiveQuery allDocsLiveQuery = view.createQuery().toLiveQuery();
allDocsLiveQuery.addChangeListener(new LiveQuery.ChangeListener() {
@Override
public void changed(LiveQuery.ChangeEvent event) {
int numTimesCalled = 0;
if (event.getError() != null) {
throw new RuntimeException(event.getError());
}
if (event.getRows().getCount() == 2) {
countDownLatch.countDown();
}
}
});
// kick off live query
allDocsLiveQuery.start();
// do pull replication against mock
mockSinglePull(true, MockDispatcher.ServerType.SYNC_GW, true);
// make sure we were called back with both docs
boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
assertTrue(success);
// clean up
allDocsLiveQuery.stop();
}
/**
* Make sure that if a continuous push gets an error
* pushing a doc, it will keep retrying it rather than giving up right away.
*
* @throws Exception
*/
    /**
     * Make sure a continuous push that keeps getting 503s from _bulk_docs
     * retries through both the inner (RemoteRequestRetry) and outer
     * (ReplicationInternal) retry loops instead of giving up immediately.
     *
     * @throws Exception on mock-server or replication failure
     */
    public void testContinuousPushRetryBehavior() throws Exception {
        RemoteRequestRetry.RETRY_DELAY_MS = 5; // speed up test execution (inner loop retry delay)
        ReplicationInternal.RETRY_DELAY_SECONDS = 1; // speed up test execution (outer loop retry delay)
        ReplicationInternal.MAX_RETRIES = 3; // speed up test execution (outer loop retry count)
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        try {
            // checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests
            MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
            mockCheckpointPut.setSticky(true);
            mockCheckpointPut.setDelayMs(500);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
            // _revs_diff response -- everything missing
            MockRevsDiff mockRevsDiff = new MockRevsDiff();
            mockRevsDiff.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
            // _bulk_docs response -- 503 errors (sticky: every attempt fails)
            MockResponse mockResponse = new MockResponse().setResponseCode(503);
            WrappedSmartMockResponse mockBulkDocs = new WrappedSmartMockResponse(mockResponse, false);
            mockBulkDocs.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
            server.play();
            // create replication
            Replication replication = database.createPushReplication(server.getUrl("/db"));
            replication.setContinuous(true);
            CountDownLatch replicationIdle = new CountDownLatch(1);
            ReplicationIdleObserver idleObserver = new ReplicationIdleObserver(replicationIdle);
            replication.addChangeListener(idleObserver);
            replication.start();
            // wait until idle
            boolean success = replicationIdle.await(30, TimeUnit.SECONDS);
            assertTrue(success);
            replication.removeChangeListener(idleObserver);
            // create a doc in local db; this triggers the first push attempt
            Document doc1 = createDocumentForPushReplication("doc1", null, null);
            // we should expect to at least see numAttempts attempts at doing POST to _bulk_docs
            // 1st attempt
            // numAttempts are number of times retry in 1 attempt.
            int numAttempts = RemoteRequestRetry.MAX_RETRIES + 1; // total number of attempts = 4 (1 initial + MAX_RETRIES)
            for (int i = 0; i < numAttempts; i++) {
                RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
                assertNotNull(request);
                dispatcher.takeRecordedResponseBlocking(request);
            }
            // By 12/16/2014, CBL core java tries RemoteRequestRetry.MAX_RETRIES + 1 see above.
            // Without fixing #299, following code should cause hang.
            // outer retry loop
            for (int j = 0; j < ReplicationInternal.MAX_RETRIES; j++) {
                // inner retry loop
                for (int i = 0; i < numAttempts; i++) {
                    RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
                    assertNotNull(request);
                    dispatcher.takeRecordedResponseBlocking(request);
                }
            }
            // gave up replication!!!
            stopReplication(replication);
        } finally {
            server.shutdown();
        }
    }
public void testMockSinglePush() throws Exception {
boolean shutdownMockWebserver = true;
mockSinglePush(shutdownMockWebserver, MockDispatcher.ServerType.SYNC_GW);
}
/**
* Do a push replication
* <p/>
* - Create docs in local db
* - One with no attachment
* - One with small attachment
* - One with large attachment
*/
    /**
     * Do a push replication
     * <p/>
     * - Create docs in local db
     * - One with no attachment
     * - One with small attachment
     * - One with large attachment
     * <p/>
     * Wires a mock server, runs a one-shot push, then asserts in detail on the
     * outgoing checkpoint / _revs_diff / _bulk_docs / per-doc multipart PUT requests.
     *
     * @param shutdownMockWebserver if false the mock server is left running so the
     *                              caller can reuse it via the returned map
     * @param serverType            mock server flavor (SYNC_GW or COUCHDB)
     * @return map with keys "server" and "dispatcher" for callers that keep the server alive
     */
    public Map<String, Object> mockSinglePush(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception {
        String doc1Id = "doc1";
        String doc2Id = "doc2";
        String doc3Id = "doc3";
        String doc4Id = "doc4";
        String doc2PathRegex = String.format("/db/%s.*", doc2Id);
        String doc3PathRegex = String.format("/db/%s.*", doc3Id);
        String doc2AttachName = "attachment.png";
        String doc3AttachName = "attachment2.png";
        String contentType = "image/png";
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(serverType);
        try {
            server.play();
            // add some documents (doc4 is deleted so the push includes a tombstone)
            Document doc1 = createDocumentForPushReplication(doc1Id, null, null);
            Document doc2 = createDocumentForPushReplication(doc2Id, doc2AttachName, contentType);
            Document doc3 = createDocumentForPushReplication(doc3Id, doc3AttachName, contentType);
            Document doc4 = createDocumentForPushReplication(doc4Id, null, null);
            doc4.delete();
            // checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests
            MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
            mockCheckpointPut.setSticky(true);
            mockCheckpointPut.setDelayMs(50);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
            // _revs_diff response -- everything missing
            MockRevsDiff mockRevsDiff = new MockRevsDiff();
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
            // _bulk_docs response -- everything stored
            MockBulkDocs mockBulkDocs = new MockBulkDocs();
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
            // doc PUT responses for docs with attachments (pushed individually, not via _bulk_docs)
            MockDocumentPut mockDoc2Put = new MockDocumentPut()
                    .setDocId(doc2Id)
                    .setRev(doc2.getCurrentRevisionId());
            dispatcher.enqueueResponse(doc2PathRegex, mockDoc2Put.generateMockResponse());
            MockDocumentPut mockDoc3Put = new MockDocumentPut()
                    .setDocId(doc3Id)
                    .setRev(doc3.getCurrentRevisionId());
            dispatcher.enqueueResponse(doc3PathRegex, mockDoc3Put.generateMockResponse());
            // run replication
            Replication replication = database.createPushReplication(server.getUrl("/db"));
            replication.setContinuous(false);
            if (serverType != MockDispatcher.ServerType.SYNC_GW) {
                replication.setCreateTarget(true);
                Assert.assertTrue(replication.shouldCreateTarget());
            }
            runReplication(replication);
            // make assertions about outgoing requests from replicator -> mock
            RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT);
            assertTrue(getCheckpointRequest.getMethod().equals("GET"));
            assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT));
            RecordedRequest revsDiffRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_REVS_DIFF);
            assertTrue(MockHelper.getUtf8Body(revsDiffRequest).contains(doc1Id));
            RecordedRequest bulkDocsRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
            assertTrue(MockHelper.getUtf8Body(bulkDocsRequest).contains(doc1Id));
            Map<String, Object> bulkDocsJson = Manager.getObjectMapper().readValue(MockHelper.getUtf8Body(bulkDocsRequest), Map.class);
            Map<String, Object> doc4Map = MockBulkDocs.findDocById(bulkDocsJson, doc4Id);
            assertTrue(((Boolean) doc4Map.get("_deleted")).booleanValue() == true);
            String str = MockHelper.getUtf8Body(bulkDocsRequest);
            Log.e(TAG, str);
            // attachment docs must NOT go through _bulk_docs
            assertFalse(MockHelper.getUtf8Body(bulkDocsRequest).contains(doc2Id));
            // doc2 goes out as a multipart PUT containing only doc2
            RecordedRequest doc2putRequest = dispatcher.takeRequest(doc2PathRegex);
            CustomMultipartReaderDelegate delegate2 = new CustomMultipartReaderDelegate();
            MultipartReader reader2 = new MultipartReader(doc2putRequest.getHeader("Content-Type"), delegate2);
            reader2.appendData(doc2putRequest.getBody());
            String body2 = new String(delegate2.data, "UTF-8");
            assertTrue(body2.contains(doc2Id));
            assertFalse(body2.contains(doc3Id));
            // doc3 goes out as a multipart PUT containing only doc3
            RecordedRequest doc3putRequest = dispatcher.takeRequest(doc3PathRegex);
            CustomMultipartReaderDelegate delegate3 = new CustomMultipartReaderDelegate();
            MultipartReader reader3 = new MultipartReader(doc3putRequest.getHeader("Content-Type"), delegate3);
            reader3.appendData(doc3putRequest.getBody());
            String body3 = new String(delegate3.data, "UTF-8");
            assertTrue(body3.contains(doc3Id));
            assertFalse(body3.contains(doc2Id));
            // wait until the mock webserver receives a PUT checkpoint request
            // 5 local sequences: 4 doc creations + 1 deletion of doc4
            int expectedLastSequence = 5;
            Log.d(TAG, "waiting for put checkpoint with lastSequence: %d", expectedLastSequence);
            List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, expectedLastSequence);
            Log.d(TAG, "done waiting for put checkpoint with lastSequence: %d", expectedLastSequence);
            validateCheckpointRequestsRevisions(checkpointRequests);
            // assert our local sequence matches what is expected
            String lastSequence = database.lastSequenceWithCheckpointId(replication.remoteCheckpointDocID());
            assertEquals(Integer.toString(expectedLastSequence), lastSequence);
            // assert completed count makes sense
            assertEquals(replication.getChangesCount(), replication.getCompletedChangesCount());
        } finally {
            // Shut down the server. Instances cannot be reused.
            if (shutdownMockWebserver) {
                server.shutdown();
            }
        }
        Map<String, Object> returnVal = new HashMap<String, Object>();
        returnVal.put("server", server);
        returnVal.put("dispatcher", dispatcher);
        return returnVal;
    }
    /**
     * Verifies that a continuous push replication reaches the IDLE state, and that
     * a second continuous push using the same checkpoint id resumes from the
     * previously recorded lastSequence and also goes idle.
     *
     * @throws Exception on mock-server or replication failure
     */
    public void testContinuousPushReplicationGoesIdle() throws Exception {
        // make sure we are starting empty
        assertEquals(0, database.getLastSequenceNumber());
        // add docs
        Map<String, Object> properties1 = new HashMap<String, Object>();
        properties1.put("doc1", "testContinuousPushReplicationGoesIdle");
        final Document doc1 = createDocWithProperties(properties1);
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        try {
            server.play();
            // checkpoint GET response w/ 404. also receives checkpoint PUT's
            MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
            mockCheckpointPut.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
            // _revs_diff response -- everything missing
            MockRevsDiff mockRevsDiff = new MockRevsDiff();
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
            // _bulk_docs response -- everything stored
            MockBulkDocs mockBulkDocs = new MockBulkDocs();
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
            // replication to do initial sync up - has to be continuous replication so the checkpoint id
            // matches the next continuous replication we're gonna do later.
            Replication firstPusher = database.createPushReplication(server.getUrl("/db"));
            firstPusher.setContinuous(true);
            final String checkpointId = firstPusher.remoteCheckpointDocID(); // save the checkpoint id for later usage
            // start the continuous replication
            CountDownLatch replicationIdleSignal = new CountDownLatch(1);
            ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
            firstPusher.addChangeListener(replicationIdleObserver);
            firstPusher.start();
            // wait until we get an IDLE event
            boolean successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
            assertTrue(successful);
            stopReplication(firstPusher);
            // wait until replication does PUT checkpoint with lastSequence=1
            int expectedLastSequence = 1;
            waitForPutCheckpointRequestWithSeq(dispatcher, expectedLastSequence);
            // the last sequence should be "1" at this point. we will use this later
            final String lastSequence = database.lastSequenceWithCheckpointId(checkpointId);
            assertEquals("1", lastSequence);
            // start a second continuous replication; must share the first one's checkpoint id
            Replication secondPusher = database.createPushReplication(server.getUrl("/db"));
            secondPusher.setContinuous(true);
            final String secondPusherCheckpointId = secondPusher.remoteCheckpointDocID();
            assertEquals(checkpointId, secondPusherCheckpointId);
            // remove current handler for the GET/PUT checkpoint request, and
            // install a new handler that returns the lastSequence from previous replication
            dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_CHECKPOINT);
            MockCheckpointGet mockCheckpointGet = new MockCheckpointGet();
            mockCheckpointGet.setLastSequence(lastSequence);
            mockCheckpointGet.setRev("0-2");
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet);
            // start second replication
            replicationIdleSignal = new CountDownLatch(1);
            replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
            secondPusher.addChangeListener(replicationIdleObserver);
            secondPusher.start();
            // wait until we get an IDLE event
            successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
            assertTrue(successful);
            stopReplication(secondPusher);
        } finally {
            server.shutdown();
        }
    }
public void testContinuousReplication404Changes() throws Exception {
int previous = PullerInternal.CHANGE_TRACKER_RESTART_DELAY_MS;
PullerInternal.CHANGE_TRACKER_RESTART_DELAY_MS = 5;
try {
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
server.play();
// mock checkpoint GET response w/ 404
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// mock _changes response
for (int i = 0; i < 100; i++) {
MockResponse mockChangesFeed = new MockResponse();
MockHelper.set404NotFoundJson(mockChangesFeed);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed);
}
// create new replication
int retryDelaySeconds = 1;
Replication pull = database.createPullReplication(server.getUrl("/db"));
pull.setContinuous(true);
// add done listener to replication
CountDownLatch replicationDoneSignal = new CountDownLatch(1);
ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
pull.addChangeListener(replicationFinishedObserver);
// start the replication
pull.start();
// wait until we get a few requests
Log.d(TAG, "Waiting for a _changes request");
RecordedRequest changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
Log.d(TAG, "Got first _changes request, waiting for another _changes request");
changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
Log.d(TAG, "Got second _changes request, waiting for another _changes request");
changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
Log.d(TAG, "Got third _changes request, stopping replicator");
// the replication should still be running
assertEquals(1, replicationDoneSignal.getCount());
// cleanup
stopReplication(pull);
} finally {
server.shutdown();
}
} finally {
PullerInternal.CHANGE_TRACKER_RESTART_DELAY_MS = previous;
}
}
/**
* Regression test for issue couchbase/couchbase-lite-android#174
*/
    /**
     * Creates a conflicted document (one deleted branch, one live branch) and
     * verifies that the push replicator reports BOTH leaf revisions in its
     * _revs_diff request (regression for couchbase/couchbase-lite-android#174).
     *
     * @throws Exception on replication failure
     */
    public void testAllLeafRevisionsArePushed() throws Exception {
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderRevDiffsAllMissing();
        mockHttpClient.setResponseDelayMilliseconds(250);
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        // factory that always hands back the canned mock client; cookie methods are no-ops
        HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
            @Override
            public HttpClient getHttpClient() {
                return mockHttpClient;
            }
            @Override
            public void addCookies(List<Cookie> cookies) {
            }
            @Override
            public void deleteCookie(String name) {
            }
            @Override
            public CookieStore getCookieStore() {
                return null;
            }
        };
        manager.setDefaultHttpClientFactory(mockHttpClientFactory);
        // build a doc with two leaves: rev4a (deleted branch) and rev2b (live branch)
        Document doc = database.createDocument();
        SavedRevision rev1a = doc.createRevision().save();
        SavedRevision rev2a = createRevisionWithRandomProps(rev1a, false);
        SavedRevision rev3a = createRevisionWithRandomProps(rev2a, false);
        // delete the branch we've been using, then create a new one to replace it
        SavedRevision rev4a = rev3a.deleteDocument();
        SavedRevision rev2b = createRevisionWithRandomProps(rev1a, true);
        assertEquals(rev2b.getId(), doc.getCurrentRevisionId());
        // sync with remote DB -- should push both leaf revisions
        Replication push = database.createPushReplication(getReplicationURL());
        runReplication(push);
        assertNull(push.getLastError());
        // find the _revs_diff captured request and decode into json
        boolean foundRevsDiff = false;
        List<HttpRequest> captured = mockHttpClient.getCapturedRequests();
        for (HttpRequest httpRequest : captured) {
            if (httpRequest instanceof HttpPost) {
                HttpPost httpPost = (HttpPost) httpRequest;
                if (httpPost.getURI().toString().endsWith("_revs_diff")) {
                    foundRevsDiff = true;
                    Map<String, Object> jsonMap = CustomizableMockHttpClient.getJsonMapFromRequest(httpPost);
                    // assert that it contains the expected revisions: both leaves, nothing else
                    List<String> revisionIds = (List) jsonMap.get(doc.getId());
                    assertEquals(2, revisionIds.size());
                    assertTrue(revisionIds.contains(rev4a.getId()));
                    assertTrue(revisionIds.contains(rev2b.getId()));
                }
            }
        }
        assertTrue(foundRevsDiff);
    }
    /**
     * Creates a local conflict, then pulls a remote "resolution" (one branch
     * promoted to rev 3, the other branch deleted at rev 3) and verifies the
     * conflict is resolved locally.
     *
     * NOTE(review): the "failing" name prefix presumably keeps this test out of
     * the normal run — confirm before renaming or relying on it.
     *
     * @throws Exception on mock-server or replication failure
     */
    public void failingTestRemoteConflictResolution() throws Exception {
        // Create a document with two conflicting edits.
        Document doc = database.createDocument();
        SavedRevision rev1 = doc.createRevision().save();
        SavedRevision rev2a = createRevisionWithRandomProps(rev1, false);
        SavedRevision rev2b = createRevisionWithRandomProps(rev1, true);
        // make sure we can query the db to get the conflict
        Query allDocsQuery = database.createAllDocumentsQuery();
        allDocsQuery.setAllDocsMode(Query.AllDocsMode.ONLY_CONFLICTS);
        QueryEnumerator rows = allDocsQuery.run();
        boolean foundDoc = false;
        assertEquals(1, rows.getCount());
        for (Iterator<QueryRow> it = rows; it.hasNext(); ) {
            QueryRow row = it.next();
            if (row.getDocument().getId().equals(doc.getId())) {
                foundDoc = true;
            }
        }
        assertTrue(foundDoc);
        // make sure doc in conflict
        assertTrue(doc.getConflictingRevisions().size() > 1);
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
        try {
            // checkpoint GET response w/ 404
            MockResponse fakeCheckpointResponse = new MockResponse();
            MockHelper.set404NotFoundJson(fakeCheckpointResponse);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
            // two remote rev-3 leaves: one "promoted" live branch, one deleted branch
            int rev3PromotedGeneration = 3;
            String rev3PromotedDigest = "d46b";
            String rev3Promoted = String.format("%d-%s", rev3PromotedGeneration, rev3PromotedDigest);
            int rev3DeletedGeneration = 3;
            String rev3DeletedDigest = "e768";
            String rev3Deleted = String.format("%d-%s", rev3DeletedGeneration, rev3DeletedDigest);
            int seq = 4;
            // _changes response advertising both rev-3 leaves for the doc
            MockChangesFeed mockChangesFeed = new MockChangesFeed();
            MockChangesFeed.MockChangedDoc mockChangedDoc = new MockChangesFeed.MockChangedDoc();
            mockChangedDoc.setDocId(doc.getId());
            mockChangedDoc.setSeq(seq);
            mockChangedDoc.setChangedRevIds(Arrays.asList(rev3Promoted, rev3Deleted));
            mockChangesFeed.add(mockChangedDoc);
            MockResponse response = mockChangesFeed.generateMockResponse();
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, response);
            // docRev3Promoted response: rev3Promoted descends from rev2a (history rev3 <- rev2a <- rev2b)
            MockDocumentGet.MockDocument docRev3Promoted = new MockDocumentGet.MockDocument(doc.getId(), rev3Promoted, seq);
            docRev3Promoted.setJsonMap(MockHelper.generateRandomJsonMap());
            MockDocumentGet mockDocRev3PromotedGet = new MockDocumentGet(docRev3Promoted);
            Map<String, Object> rev3PromotedRevHistory = new HashMap<String, Object>();
            rev3PromotedRevHistory.put("start", rev3PromotedGeneration);
            List ids = Arrays.asList(
                    rev3PromotedDigest,
                    RevisionInternal.digestFromRevID(rev2a.getId()),
                    RevisionInternal.digestFromRevID(rev2b.getId())
            );
            rev3PromotedRevHistory.put("ids", ids);
            mockDocRev3PromotedGet.setRevHistoryMap(rev3PromotedRevHistory);
            dispatcher.enqueueResponse(docRev3Promoted.getDocPathRegex(), mockDocRev3PromotedGet.generateMockResponse());
            // docRev3Deleted response: a tombstone closing the rev2b branch
            MockDocumentGet.MockDocument docRev3Deleted = new MockDocumentGet.MockDocument(doc.getId(), rev3Deleted, seq);
            Map<String, Object> jsonMap = MockHelper.generateRandomJsonMap();
            jsonMap.put("_deleted", true);
            docRev3Deleted.setJsonMap(jsonMap);
            MockDocumentGet mockDocRev3DeletedGet = new MockDocumentGet(docRev3Deleted);
            Map<String, Object> rev3DeletedRevHistory = new HashMap<String, Object>();
            rev3DeletedRevHistory.put("start", rev3DeletedGeneration);
            ids = Arrays.asList(
                    rev3DeletedDigest,
                    RevisionInternal.digestFromRevID(rev2b.getId()),
                    RevisionInternal.digestFromRevID(rev1.getId())
            );
            rev3DeletedRevHistory.put("ids", ids);
            mockDocRev3DeletedGet.setRevHistoryMap(rev3DeletedRevHistory);
            dispatcher.enqueueResponse(docRev3Deleted.getDocPathRegex(), mockDocRev3DeletedGet.generateMockResponse());
            // start mock server
            server.play();
            // run pull replication
            Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
            runReplication(pullReplication);
            assertNull(pullReplication.getLastError());
            // assertions about outgoing requests
            RecordedRequest changesRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
            assertNotNull(changesRequest);
            RecordedRequest docRev3DeletedRequest = dispatcher.takeRequest(docRev3Deleted.getDocPathRegex());
            assertNotNull(docRev3DeletedRequest);
            RecordedRequest docRev3PromotedRequest = dispatcher.takeRequest(docRev3Promoted.getDocPathRegex());
            assertNotNull(docRev3PromotedRequest);
            // Make sure the conflict was resolved locally.
            assertEquals(1, doc.getConflictingRevisions().size());
        } finally {
            server.shutdown();
        }
    }
public void testPushReplicationCanMissDocs() throws Exception {
assertEquals(0, database.getLastSequenceNumber());
Map<String, Object> properties1 = new HashMap<String, Object>();
properties1.put("doc1", "testPushReplicationCanMissDocs");
final Document doc1 = createDocWithProperties(properties1);
Map<String, Object> properties2 = new HashMap<String, Object>();
properties1.put("doc2", "testPushReplicationCanMissDocs");
final Document doc2 = createDocWithProperties(properties2);
UnsavedRevision doc2UnsavedRev = doc2.createRevision();
InputStream attachmentStream = getAsset("attachment.png");
doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
SavedRevision doc2Rev = doc2UnsavedRev.save();
assertNotNull(doc2Rev);
final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
mockHttpClient.addResponderFakeLocalDocumentUpdate404();
mockHttpClient.setResponder("_bulk_docs", new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
String json = "{\"error\":\"not_found\",\"reason\":\"missing\"}";
return CustomizableMockHttpClient.generateHttpResponseObject(404, "NOT FOUND", json);
}
});
mockHttpClient.setResponder(doc2.getId(), new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
Map<String, Object> responseObject = new HashMap<String, Object>();
responseObject.put("id", doc2.getId());
responseObject.put("ok", true);
responseObject.put("rev", doc2.getCurrentRevisionId());
return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
}
});
// create a replication obeserver to wait until replication finishes
CountDownLatch replicationDoneSignal = new CountDownLatch(1);
ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
// create replication and add observer
manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
Replication pusher = database.createPushReplication(getReplicationURL());
pusher.addChangeListener(replicationFinishedObserver);
// save the checkpoint id for later usage
String checkpointId = pusher.remoteCheckpointDocID();
// kick off the replication
pusher.start();
// wait for it to finish
boolean success = replicationDoneSignal.await(60, TimeUnit.SECONDS);
assertTrue(success);
Log.d(TAG, "replicationDoneSignal finished");
// we would expect it to have recorded an error because one of the docs (the one without the attachment)
// will have failed.
assertNotNull(pusher.getLastError());
// workaround for the fact that the replicationDoneSignal.wait() call will unblock before all
// the statements in Replication.stopped() have even had a chance to execute.
// (specifically the ones that come after the call to notifyChangeListeners())
Thread.sleep(500);
String localLastSequence = database.lastSequenceWithCheckpointId(checkpointId);
Log.d(TAG, "database.lastSequenceWithCheckpointId(): " + localLastSequence);
Log.d(TAG, "doc2.getCurrentRevision().getSequence(): " + doc2.getCurrentRevision().getSequence());
String msg = "Since doc1 failed, the database should _not_ have had its lastSequence bumped" +
" to doc2's sequence number. If it did, it's bug: github.com/couchbase/couchbase-lite-java-core/issues/95";
assertFalse(msg, Long.toString(doc2.getCurrentRevision().getSequence()).equals(localLastSequence));
assertNull(localLastSequence);
assertTrue(doc2.getCurrentRevision().getSequence() > 0);
}
    /**
     * Verifies that when a document with an existing attachment is updated (the
     * server already has the attachment per _revs_diff possible_ancestors), the
     * subsequent push PUT does NOT re-send the attachment as multipart.
     *
     * @throws Exception on replication failure
     */
    public void testPushUpdatedDocWithoutReSendingAttachments() throws Exception {
        assertEquals(0, database.getLastSequenceNumber());
        Map<String, Object> properties1 = new HashMap<String, Object>();
        properties1.put("dynamic", 1);
        final Document doc = createDocWithProperties(properties1);
        SavedRevision doc1Rev = doc.getCurrentRevision();
        // Add attachment to document
        UnsavedRevision doc2UnsavedRev = doc.createRevision();
        InputStream attachmentStream = getAsset("attachment.png");
        doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
        SavedRevision doc2Rev = doc2UnsavedRev.save();
        assertNotNull(doc2Rev);
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        mockHttpClient.setResponder(doc.getId(), new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                Map<String, Object> responseObject = new HashMap<String, Object>();
                responseObject.put("id", doc.getId());
                responseObject.put("ok", true);
                responseObject.put("rev", doc.getCurrentRevisionId());
                return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
            }
        });
        // create replication and add observer; first push sends the attachment
        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(getReplicationURL());
        runReplication(pusher);
        List<HttpRequest> captured = mockHttpClient.getCapturedRequests();
        for (HttpRequest httpRequest : captured) {
            // verify that there are no PUT requests with attachments
            if (httpRequest instanceof HttpPut) {
                HttpPut httpPut = (HttpPut) httpRequest;
                HttpEntity entity = httpPut.getEntity();
                // NOTE(review): assertion below is disabled in the original, so this loop
                // currently verifies nothing for the first push — confirm before re-enabling.
                //assertFalse("PUT request with updated doc properties contains attachment", entity instanceof MultipartEntity);
            }
        }
        mockHttpClient.clearCapturedRequests();
        // update the doc's properties only (attachment unchanged)
        Document oldDoc = database.getDocument(doc.getId());
        UnsavedRevision aUnsavedRev = oldDoc.createRevision();
        Map<String, Object> prop = new HashMap<String, Object>();
        prop.putAll(oldDoc.getProperties());
        prop.put("dynamic", (Integer) oldDoc.getProperty("dynamic") + 1);
        aUnsavedRev.setProperties(prop);
        final SavedRevision savedRev = aUnsavedRev.save();
        mockHttpClient.setResponder(doc.getId(), new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                Map<String, Object> responseObject = new HashMap<String, Object>();
                responseObject.put("id", doc.getId());
                responseObject.put("ok", true);
                responseObject.put("rev", savedRev.getId());
                return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
            }
        });
        // _revs_diff reply: new rev missing, but both prior revs are possible ancestors,
        // which tells the pusher the server already has the attachment data
        final String json = String.format("{\"%s\":{\"missing\":[\"%s\"],\"possible_ancestors\":[\"%s\",\"%s\"]}}", doc.getId(), savedRev.getId(), doc1Rev.getId(), doc2Rev.getId());
        mockHttpClient.setResponder("_revs_diff", new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                return mockHttpClient.generateHttpResponseObject(json);
            }
        });
        // second push: updated properties must go out as plain JSON, not multipart
        pusher = database.createPushReplication(getReplicationURL());
        runReplication(pusher);
        captured = mockHttpClient.getCapturedRequests();
        for (HttpRequest httpRequest : captured) {
            // verify that there are no PUT requests with attachments
            if (httpRequest instanceof HttpPut) {
                HttpPut httpPut = (HttpPut) httpRequest;
                HttpEntity entity = httpPut.getEntity();
                assertFalse("PUT request with updated doc properties contains attachment", entity instanceof MultipartEntity);
            }
        }
    }
    /**
     * Verifies the pusher's fallback when the server rejects a multipart PUT with
     * 415 Unsupported Media Type: the first PUT is multipart, and the retry must
     * be plain (non-multipart) JSON.
     *
     * @throws Exception on replication failure
     */
    public void testServerDoesNotSupportMultipart() throws Exception {
        assertEquals(0, database.getLastSequenceNumber());
        Map<String, Object> properties1 = new HashMap<String, Object>();
        properties1.put("dynamic", 1);
        final Document doc = createDocWithProperties(properties1);
        SavedRevision doc1Rev = doc.getCurrentRevision();
        // Add attachment to document
        UnsavedRevision doc2UnsavedRev = doc.createRevision();
        InputStream attachmentStream = getAsset("attachment.png");
        doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
        SavedRevision doc2Rev = doc2UnsavedRev.save();
        assertNotNull(doc2Rev);
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        // chain of responders consumed in order for the doc's PUT requests
        Queue<CustomizableMockHttpClient.Responder> responders = new LinkedList<CustomizableMockHttpClient.Responder>();
        //Reject multipart PUT with response code 415
        responders.add(new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                String json = "{\"error\":\"Unsupported Media Type\",\"reason\":\"missing\"}";
                return CustomizableMockHttpClient.generateHttpResponseObject(415, "Unsupported Media Type", json);
            }
        });
        // second call should be plain json, return good response
        responders.add(new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                Map<String, Object> responseObject = new HashMap<String, Object>();
                responseObject.put("id", doc.getId());
                responseObject.put("ok", true);
                responseObject.put("rev", doc.getCurrentRevisionId());
                return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
            }
        });
        ResponderChain responderChain = new ResponderChain(responders);
        mockHttpClient.setResponder(doc.getId(), responderChain);
        // create replication and add observer
        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(getReplicationURL());
        runReplication(pusher);
        List<HttpRequest> captured = mockHttpClient.getCapturedRequests();
        int entityIndex = 0;
        for (HttpRequest httpRequest : captured) {
            // first PUT must be multipart; the 415-triggered retry must not be
            if (httpRequest instanceof HttpPut) {
                HttpPut httpPut = (HttpPut) httpRequest;
                HttpEntity entity = httpPut.getEntity();
                if (entityIndex++ == 0) {
                    assertTrue("PUT request with attachment is not multipart", entity instanceof MultipartEntity);
                } else {
                    assertFalse("PUT request with attachment is multipart", entity instanceof MultipartEntity);
                }
            }
        }
    }
/**
 * Sanity-checks Replication.serverIsSyncGatewayVersion(): false before any
 * server type is known, then compared against the version parsed from the
 * "Couchbase Sync Gateway/x.yz" server string.
 */
public void testServerIsSyncGatewayVersion() throws Exception {
    Replication replication = database.createPushReplication(getReplicationURL());
    // No server type recorded yet, so no minimum version can be satisfied.
    assertFalse(replication.serverIsSyncGatewayVersion("0.01"));
    replication.setServerType("Couchbase Sync Gateway/0.93");
    // 0.93 satisfies a 0.92 minimum but not a 0.94 minimum.
    assertTrue(replication.serverIsSyncGatewayVersion("0.92"));
    assertFalse(replication.serverIsSyncGatewayVersion("0.94"));
}
/**
 * Remote checkpoint document IDs must differ between a plain pull and a
 * filtered pull, but must be identical for two pulls with the same filter,
 * params and doc IDs regardless of insertion/listing order.
 */
public void testDifferentCheckpointsFilteredReplication() throws Exception {
    Replication unfiltered = database.createPullReplication(getReplicationURL());
    String unfilteredCheckpointId = unfiltered.remoteCheckpointDocID();

    // Filtered replication: different checkpoint ID than the unfiltered one.
    Replication filtered = database.createPullReplication(getReplicationURL());
    filtered.setFilter("foo/bar");
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("a", "aval");
    params.put("b", "bval");
    List<String> docIds = Arrays.asList("doc3", "doc1", "doc2");
    filtered.setDocIds(docIds);
    assertEquals(docIds, filtered.getDocIds());
    filtered.setFilterParams(params);
    String filteredCheckpointId = filtered.remoteCheckpointDocID();
    assertFalse(filteredCheckpointId.equals(unfilteredCheckpointId));

    // Same configuration, different insertion order: checkpoint ID must match.
    Replication filteredAgain = database.createPullReplication(getReplicationURL());
    filteredAgain.setFilter("foo/bar");
    params = new HashMap<String, Object>();
    params.put("b", "bval");
    params.put("a", "aval");
    filteredAgain.setDocIds(Arrays.asList("doc2", "doc3", "doc1"));
    filteredAgain.setFilterParams(params);
    String filteredCheckpointId2 = filteredAgain.remoteCheckpointDocID();
    assertTrue(filteredCheckpointId.equals(filteredCheckpointId2));
}
/**
 * Verifies Replication.setCookie()/deleteCookie(): a cookie set on the
 * replication lands in the client factory's cookie store with the expected
 * name, value, domain, path, expiry and secure flag, and can be deleted.
 */
public void testSetReplicationCookie() throws Exception {
    URL replicationUrl = getReplicationURL();
    Replication puller = database.createPullReplication(replicationUrl);
    String cookieName = "foo";
    String cookieVal = "bar";
    boolean isSecure = false;
    boolean httpOnly = false;
    // Expiry one day in the future.
    Calendar calendar = Calendar.getInstance();
    calendar.setTime(new Date());
    int numDaysToAdd = 1;
    calendar.add(Calendar.DATE, numDaysToAdd);
    Date expirationDate = calendar.getTime();
    // Set the cookie; empty path means the replication URL's path is used.
    puller.setCookie(cookieName, cookieVal, "", expirationDate, isSecure, httpOnly);
    // It should be the only cookie in the store, with all expected attributes.
    CookieStore cookieStore = puller.getClientFactory().getCookieStore();
    List<Cookie> cookies = cookieStore.getCookies();
    assertEquals(1, cookies.size());
    Cookie storedCookie = cookies.get(0);
    assertEquals(cookieName, storedCookie.getName());
    assertEquals(cookieVal, storedCookie.getValue());
    assertEquals(replicationUrl.getHost(), storedCookie.getDomain());
    assertEquals(replicationUrl.getPath(), storedCookie.getPath());
    assertEquals(expirationDate, storedCookie.getExpiryDate());
    assertEquals(isSecure, storedCookie.isSecure());
    // A second cookie should coexist with the first.
    String cookieName2 = "foo2";
    puller.setCookie(cookieName2, cookieVal, "", expirationDate, isSecure, false);
    assertEquals(2, cookieStore.getCookies().size());
    // Deleting the second cookie leaves only the original.
    puller.deleteCookie(cookieName2);
    assertEquals(1, cookieStore.getCookies().size());
    assertEquals(cookieName, cookieStore.getCookies().get(0).getName());
}
/**
 * Pulls three documents from a mock server where the middle document's GET
 * returns 404 (simulating a doc purged on the remote after appearing in
 * _changes). The puller must still store doc1 and doc3, skip doc2, and
 * checkpoint at doc3's sequence — i.e. a missing doc must not stall the
 * checkpoint.
 */
public void testChangesFeedWithPurgedDoc() throws Exception {
    // Generate unique document ids per test run.
    String doc1Id = "doc1-" + System.currentTimeMillis();
    String doc2Id = "doc2-" + System.currentTimeMillis();
    String doc3Id = "doc3-" + System.currentTimeMillis();
    // Generate mock documents at sequences 1..3.
    final MockDocumentGet.MockDocument mockDocument1 = new MockDocumentGet.MockDocument(
            doc1Id, "1-a000", 1);
    mockDocument1.setJsonMap(MockHelper.generateRandomJsonMap());
    final MockDocumentGet.MockDocument mockDocument2 = new MockDocumentGet.MockDocument(
            doc2Id, "1-b000", 2);
    mockDocument2.setJsonMap(MockHelper.generateRandomJsonMap());
    final MockDocumentGet.MockDocument mockDocument3 = new MockDocumentGet.MockDocument(
            doc3Id, "1-c000", 3);
    mockDocument3.setJsonMap(MockHelper.generateRandomJsonMap());
    // Create mockwebserver and custom dispatcher.
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    try {
        // Checkpoint GET response w/ 404 (no previous checkpoint).
        MockResponse fakeCheckpointResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeCheckpointResponse);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // _changes response listing all three docs.
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument1));
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument2));
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument3));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // doc1 response: normal GET.
        MockDocumentGet mockDocumentGet1 = new MockDocumentGet(mockDocument1);
        dispatcher.enqueueResponse(mockDocument1.getDocPathRegex(), mockDocumentGet1.generateMockResponse());
        // doc2 response: 404, simulating a purged/missing doc.
        MockResponse missingDocumentMockResponse = new MockResponse();
        MockHelper.set404NotFoundJson(missingDocumentMockResponse);
        dispatcher.enqueueResponse(mockDocument2.getDocPathRegex(), missingDocumentMockResponse);
        // doc3 response: normal GET.
        MockDocumentGet mockDocumentGet3 = new MockDocumentGet(mockDocument3);
        dispatcher.enqueueResponse(mockDocument3.getDocPathRegex(), mockDocumentGet3.generateMockResponse());
        // Checkpoint PUT response (sticky: reused for every checkpoint PUT).
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // Start mock server.
        server.play();
        // URL for replication.
        URL baseUrl = server.getUrl("/db");
        // One-shot pull replication.
        Replication pullReplication = database.createPullReplication(baseUrl);
        pullReplication.setContinuous(false);
        // Change listener to notify when the replication is finished.
        CountDownLatch replicationFinishedContCountDownLatch = new CountDownLatch(1);
        ReplicationFinishedObserver replicationFinishedObserver =
                new ReplicationFinishedObserver(replicationFinishedContCountDownLatch);
        pullReplication.addChangeListener(replicationFinishedObserver);
        // Start replication and wait for completion.
        pullReplication.start();
        boolean success = replicationFinishedContCountDownLatch.await(100, TimeUnit.SECONDS);
        assertTrue(success);
        if (pullReplication.getLastError() != null) {
            Log.d(TAG, "Replication had error: " + ((HttpResponseException) pullReplication.getLastError()).getStatusCode());
        }
        // Assert document 1 was correctly pulled.
        Document doc1 = database.getDocument(doc1Id);
        assertNotNull(doc1);
        assertNotNull(doc1.getCurrentRevision());
        // Assert it was impossible to pull doc2 (no current revision stored).
        Document doc2 = database.getDocument(doc2Id);
        assertNotNull(doc2);
        assertNull(doc2.getCurrentRevision());
        // Assert it was possible to pull doc3 despite doc2 being missing.
        Document doc3 = database.getDocument(doc3Id);
        assertNotNull(doc3);
        assertNotNull(doc3.getCurrentRevision());
        // Wait until the replicator PUTs a checkpoint with mockDocument3's sequence.
        waitForPutCheckpointRequestWithSeq(dispatcher, mockDocument3.getDocSeq());
        // Last saved sequence must equal the last pulled document's sequence.
        String doc3Seq = Integer.toString(mockDocument3.getDocSeq());
        String lastSequence = database.lastSequenceWithCheckpointId(pullReplication.remoteCheckpointDocID());
        assertEquals(doc3Seq, lastSequence);
    } finally {
        // Stop mock server.
        server.shutdown();
    }
}
/**
 * Regression test: a revision that is created and then immediately purged
 * must NOT be pushed by a continuous push replication. The test counts
 * _bulk_docs requests before and after the purge — the count must stay at 1.
 */
public void testPushPurgedDoc() throws Throwable {
    int numBulkDocRequests = 0;
    HttpPost lastBulkDocsRequest = null;
    Map<String, Object> properties = new HashMap<String, Object>();
    properties.put("testName", "testPurgeDocument");
    Document doc = createDocumentWithProperties(database, properties);
    assertNotNull(doc);
    // Mock client: every rev reported missing, local checkpoint doc 404s,
    // responses delayed 250ms to widen the race window.
    final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
    mockHttpClient.addResponderRevDiffsAllMissing();
    mockHttpClient.setResponseDelayMilliseconds(250);
    mockHttpClient.addResponderFakeLocalDocumentUpdate404();
    HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
        @Override
        public HttpClient getHttpClient() {
            return mockHttpClient;
        }
        @Override
        public void addCookies(List<Cookie> cookies) {
        }
        @Override
        public void deleteCookie(String name) {
        }
        @Override
        public CookieStore getCookieStore() {
            return null;
        }
    };
    URL remote = getReplicationURL();
    manager.setDefaultHttpClientFactory(mockHttpClientFactory);
    Replication pusher = database.createPushReplication(remote);
    pusher.setContinuous(true);
    // Latch released once changesCount == completedChangesCount (caught up).
    final CountDownLatch replicationCaughtUpSignal = new CountDownLatch(1);
    pusher.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            final int changesCount = event.getSource().getChangesCount();
            final int completedChangesCount = event.getSource().getCompletedChangesCount();
            String msg = String.format("changes: %d completed changes: %d", changesCount, completedChangesCount);
            Log.d(TAG, msg);
            if (changesCount == completedChangesCount && changesCount != 0) {
                replicationCaughtUpSignal.countDown();
            }
        }
    });
    pusher.start();
    // Wait until the initial doc is pushed.
    boolean didNotTimeOut = replicationCaughtUpSignal.await(60, TimeUnit.SECONDS);
    assertTrue(didNotTimeOut);
    // At this point, we should have captured exactly 1 bulk docs request.
    numBulkDocRequests = 0;
    for (HttpRequest capturedRequest : mockHttpClient.getCapturedRequests()) {
        if (capturedRequest instanceof HttpPost && ((HttpPost) capturedRequest).getURI().toString().endsWith("_bulk_docs")) {
            lastBulkDocsRequest = (HttpPost) capturedRequest;
            numBulkDocRequests += 1;
        }
    }
    assertEquals(1, numBulkDocRequests);
    // That bulk docs request should have the "start" key under its _revisions.
    Map<String, Object> jsonMap = mockHttpClient.getJsonMapFromRequest((HttpPost) lastBulkDocsRequest);
    List docs = (List) jsonMap.get("docs");
    Map<String, Object> onlyDoc = (Map) docs.get(0);
    Map<String, Object> revisions = (Map) onlyDoc.get("_revisions");
    assertTrue(revisions.containsKey("start"));
    // Now add a new revision, which will trigger the pusher to try to push it.
    properties = new HashMap<String, Object>();
    properties.put("testName2", "update doc");
    UnsavedRevision unsavedRevision = doc.createRevision();
    unsavedRevision.setUserProperties(properties);
    unsavedRevision.save();
    // But then immediately purge it.
    doc.purge();
    // Wait for a while to give the replicator a chance to push it
    // (it should not actually push anything).
    Thread.sleep(5 * 1000);
    // We should not have gotten any more _bulk_docs requests, because
    // the replicator should not have pushed anything else.
    // (in the case of the bug, it was trying to push the purged revision)
    numBulkDocRequests = 0;
    for (HttpRequest capturedRequest : mockHttpClient.getCapturedRequests()) {
        if (capturedRequest instanceof HttpPost && ((HttpPost) capturedRequest).getURI().toString().endsWith("_bulk_docs")) {
            numBulkDocRequests += 1;
        }
    }
    assertEquals(1, numBulkDocRequests);
    stopReplication(pusher);
}
/**
 * Verifies that the pusher batches documents: with INBOX_CAPACITY forced to
 * 5 and 15 local docs, no single _bulk_docs POST may carry more than
 * INBOX_CAPACITY docs, and all docs must be sent in total. The static
 * capacity is restored in a finally block so other tests are unaffected.
 */
public void testPusherBatching() throws Throwable {
    int previous = ReplicationInternal.INBOX_CAPACITY;
    ReplicationInternal.INBOX_CAPACITY = 5;
    try {
        // Create a bunch of local documents (3 full batches' worth).
        int numDocsToSend = ReplicationInternal.INBOX_CAPACITY * 3;
        for (int i = 0; i < numDocsToSend; i++) {
            Map<String, Object> properties = new HashMap<String, Object>();
            properties.put("testPusherBatching", i);
            createDocumentWithProperties(database, properties);
        }
        // Kick off a one time push replication to a mock http client.
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        HttpClientFactory mockHttpClientFactory = mockFactoryFactory(mockHttpClient);
        URL remote = getReplicationURL();
        manager.setDefaultHttpClientFactory(mockHttpClientFactory);
        Replication pusher = database.createPushReplication(remote);
        runReplication(pusher);
        assertNull(pusher.getLastError());
        int numDocsSent = 0;
        // Verify that only INBOX_CAPACITY documents are included in any given bulk post request.
        List<HttpRequest> capturedRequests = mockHttpClient.getCapturedRequests();
        for (HttpRequest capturedRequest : capturedRequests) {
            if (capturedRequest instanceof HttpPost) {
                HttpPost capturedPostRequest = (HttpPost) capturedRequest;
                if (capturedPostRequest.getURI().getPath().endsWith("_bulk_docs")) {
                    ArrayList docs = CustomizableMockHttpClient.extractDocsFromBulkDocsPost(capturedRequest);
                    String msg = "# of bulk docs pushed should be <= INBOX_CAPACITY";
                    assertTrue(msg, docs.size() <= ReplicationInternal.INBOX_CAPACITY);
                    numDocsSent += docs.size();
                }
            }
        }
        // Every local doc must have been sent exactly once across the batches.
        assertEquals(numDocsToSend, numDocsSent);
    } finally {
        // Restore the static capacity for subsequent tests.
        ReplicationInternal.INBOX_CAPACITY = previous;
    }
}
/**
 * DISABLED (named failingTest* so the runner skips it): intended to verify
 * pulling a doc with a gzipped attachment. The original body, kept below for
 * reference, relied on a live sync gateway and needs to be rewritten against
 * MockWebServer before it can be re-enabled.
 */
public void failingTestPullerGzipped() throws Throwable {
    // TODO: rewrite w/ MockWebserver
    /*String docIdTimestamp = Long.toString(System.currentTimeMillis());
    final String doc1Id = String.format("doc1-%s", docIdTimestamp);
    String attachmentName = "attachment.png";
    addDocWithId(doc1Id, attachmentName, true);
    doPullReplication();
    Log.d(TAG, "Fetching doc1 via id: " + doc1Id);
    Document doc1 = database.getDocument(doc1Id);
    assertNotNull(doc1);
    assertTrue(doc1.getCurrentRevisionId().startsWith("1-"));
    assertEquals(1, doc1.getProperties().get("foo"));
    Attachment attachment = doc1.getCurrentRevision().getAttachment(attachmentName);
    assertTrue(attachment.getLength() > 0);
    assertTrue(attachment.getGZipped());
    InputStream is = attachment.getContent();
    byte[] receivedBytes = TextUtils.read(is);
    is.close();
    InputStream attachmentStream = getAsset(attachmentName);
    byte[] actualBytes = TextUtils.read(attachmentStream);
    Assert.assertEquals(actualBytes.length, receivedBytes.length);
    Assert.assertEquals(actualBytes, receivedBytes);*/
}
/**
 * Verify that validation blocks are called correctly for docs
 * pulled from the sync gateway.
 * <p/>
 * - Add doc to (mock) sync gateway
 * - Add validation function that will reject that doc
 * - Do a pull replication
 * - Assert that the doc does _not_ make it into the db
 */
public void testValidationBlockCalled() throws Throwable {
    final MockDocumentGet.MockDocument mockDocument = new MockDocumentGet.MockDocument("doc1", "1-3e28", 1);
    mockDocument.setJsonMap(MockHelper.generateRandomJsonMap());
    // Create mockwebserver and custom dispatcher.
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // Checkpoint GET response w/ 404 (no previous checkpoint).
        MockResponse fakeCheckpointResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeCheckpointResponse);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // _changes response announcing the single mock doc.
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // Doc GET response.
        MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDocument);
        dispatcher.enqueueResponse(mockDocument.getDocPathRegex(), mockDocumentGet.generateMockResponse());
        // Checkpoint PUT response.
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, new MockCheckpointPut());
        // Start mock server.
        server.play();
        // Add validation block that rejects exactly the mock doc's id.
        database.setValidation("testValidationBlockCalled", new Validator() {
            @Override
            public void validate(Revision newRevision, ValidationContext context) {
                if (newRevision.getDocument().getId().equals(mockDocument.getDocId())) {
                    context.reject("Reject");
                }
            }
        });
        // Run pull replication.
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        runReplication(pullReplication);
        waitForPutCheckpointRequestWithSeq(dispatcher, mockDocument.getDocSeq());
        // Assert doc is not in local db.
        Document doc = database.getDocument(mockDocument.getDocId());
        assertNull(doc.getCurrentRevision()); // doc should have been rejected by validation, and therefore not present
    } finally {
        server.shutdown();
    }
}
/**
 * Pulls 20 docs from a preloaded mock CouchDB via a continuous replication,
 * then calls restart() and verifies the replication reaches IDLE again.
 * Uses two latches: one for the first IDLE, and a count-2 latch that is only
 * exhausted after the post-restart IDLE.
 */
public void testMockPullerRestart() throws Exception {
    final int numMockRemoteDocs = 20; // must be multiple of 10!
    final AtomicInteger numDocsPulledLocally = new AtomicInteger(0);
    MockDispatcher dispatcher = new MockDispatcher();
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    int numDocsPerChangesResponse = numMockRemoteDocs / 10;
    MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, numMockRemoteDocs, numDocsPerChangesResponse);
    try {
        server.play();
        final CountDownLatch receivedAllDocs = new CountDownLatch(1);
        // Continuous pull replication against the mock server.
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setContinuous(true);
        // It should go idle twice (once before and once after restart), hence countdown latch = 2.
        final CountDownLatch replicationIdleFirstTime = new CountDownLatch(1);
        final CountDownLatch replicationIdleSecondTime = new CountDownLatch(2);
        pullReplication.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                // Count every transition into the IDLE state.
                if (event.getTransition() != null && event.getTransition().getDestination() == ReplicationState.IDLE) {
                    replicationIdleFirstTime.countDown();
                    replicationIdleSecondTime.countDown();
                }
            }
        });
        // Count pulled docs via database change events.
        database.addChangeListener(new Database.ChangeListener() {
            @Override
            public void changed(Database.ChangeEvent event) {
                List<DocumentChange> changes = event.getChanges();
                for (DocumentChange change : changes) {
                    numDocsPulledLocally.addAndGet(1);
                }
                if (numDocsPulledLocally.get() == numMockRemoteDocs) {
                    receivedAllDocs.countDown();
                }
            }
        });
        pullReplication.start();
        // Wait until we received all mock docs or timeout occurs.
        boolean success = receivedAllDocs.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        // Wait until replication goes idle.
        success = replicationIdleFirstTime.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        pullReplication.restart();
        // Wait until replication goes idle again (second countdown on the count-2 latch).
        success = replicationIdleSecondTime.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        stopReplication(pullReplication);
    } finally {
        // Cleanup / shutdown.
        server.shutdown();
    }
}
/**
 * Runs a one-shot push against a client that fails every request with a 406
 * and verifies the replication reports the error: zero changes counted,
 * non-null lastError, and an error-bearing change event delivered to
 * listeners.
 */
public void testRunReplicationWithError() throws Exception {
    HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
        @Override
        public HttpClient getHttpClient() {
            CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
            int statusCode = 406;
            mockHttpClient.addResponderFailAllRequests(statusCode);
            return mockHttpClient;
        }
        @Override
        public void addCookies(List<Cookie> cookies) {
        }
        @Override
        public void deleteCookie(String name) {
        }
        @Override
        public CookieStore getCookieStore() {
            return null;
        }
    };
    manager.setDefaultHttpClientFactory(mockHttpClientFactory);
    Replication r1 = database.createPushReplication(getReplicationURL());
    // Latch released when a change event carrying an error is observed.
    final CountDownLatch changeEventError = new CountDownLatch(1);
    r1.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            Log.d(TAG, "change event: %s", event);
            if (event.getError() != null) {
                changeEventError.countDown();
            }
        }
    });
    Assert.assertFalse(r1.isContinuous());
    runReplication(r1);
    // It should have failed with a 406 (the status all requests return):
    Assert.assertEquals(0, r1.getCompletedChangesCount());
    Assert.assertEquals(0, r1.getChangesCount());
    Assert.assertNotNull(r1.getLastError());
    boolean success = changeEventError.await(5, TimeUnit.SECONDS);
    Assert.assertTrue(success);
}
/**
 * buildRelativeURLString() should append a plain relative path directly to
 * the remote database URL.
 */
public void testBuildRelativeURLString() throws Exception {
    Replication replication = database.createPullReplication(new URL("http://10.0.0.3:4984/todos/"));
    String built = replication.buildRelativeURLString("foo");
    Assert.assertEquals("http://10.0.0.3:4984/todos/foo", built);
}
/**
 * buildRelativeURLString() should not double the slash when the relative
 * path itself starts with '/'.
 */
public void testBuildRelativeURLStringWithLeadingSlash() throws Exception {
    Replication replication = database.createPullReplication(new URL("http://10.0.0.3:4984/todos/"));
    String built = replication.buildRelativeURLString("/foo");
    Assert.assertEquals("http://10.0.0.3:4984/todos/foo", built);
}
/**
 * Channels set on a pull replication are returned verbatim; clearing them
 * with null yields an empty list rather than null.
 */
public void testChannels() throws Exception {
    URL remote = getReplicationURL();
    Replication puller = database.createPullReplication(remote);
    List<String> channelList = new ArrayList<String>();
    channelList.add("chan1");
    channelList.add("chan2");
    puller.setChannels(channelList);
    Assert.assertEquals(channelList, puller.getChannels());
    puller.setChannels(null);
    Assert.assertTrue(puller.getChannels().isEmpty());
}
/**
 * Exercises the interaction between channels and filters: a custom filter
 * never reports channels; setting channels installs the implicit
 * "sync_gateway/bychannel" filter with a comma-joined "channels" param; and
 * clearing channels removes both filter and params.
 */
public void testChannelsMore() throws MalformedURLException, CouchbaseLiteException {
    Database db = startDatabase();
    URL fakeRemoteURL = new URL("http://couchbase.com/no_such_db");
    Replication repl = db.createPullReplication(fakeRemoteURL);
    // No channels by default, and a custom filter doesn't create any.
    assertTrue(repl.getChannels().isEmpty());
    repl.setFilter("foo/bar");
    assertTrue(repl.getChannels().isEmpty());
    Map<String, Object> customParams = new HashMap<String, Object>();
    customParams.put("a", "b");
    repl.setFilterParams(customParams);
    assertTrue(repl.getChannels().isEmpty());
    // Clearing channels while a custom filter is set leaves the filter intact.
    repl.setChannels(null);
    assertEquals("foo/bar", repl.getFilter());
    assertEquals(customParams, repl.getFilterParams());
    // Setting channels installs the sync-gateway bychannel filter.
    List<String> channelNames = new ArrayList<String>();
    channelNames.add("NBC");
    channelNames.add("MTV");
    repl.setChannels(channelNames);
    assertEquals(channelNames, repl.getChannels());
    assertEquals("sync_gateway/bychannel", repl.getFilter());
    Map<String, Object> expectedParams = new HashMap<String, Object>();
    expectedParams.put("channels", "NBC,MTV");
    assertEquals(expectedParams, repl.getFilterParams());
    // Clearing channels removes the implicit filter and its params.
    repl.setChannels(null);
    assertEquals(repl.getFilter(), null);
    assertEquals(null, repl.getFilterParams());
}
/**
 * A transient 503 from _bulk_docs is recoverable: the retry should succeed
 * and the replication should finish without an error.
 */
public void testPushReplicationRecoverableError() throws Exception {
    runPushReplicationWithTransientError("HTTP/1.1 503 Service Unavailable", false);
}
/**
 * A 404 from _bulk_docs is non-recoverable: the replication should finish
 * with its lastError set.
 */
public void testPushReplicationNonRecoverableError() throws Exception {
    runPushReplicationWithTransientError("HTTP/1.1 404 Not Found", true);
}
/**
 * Shared driver: pushes one document while the first _bulk_docs response is
 * the given error status and the second succeeds.
 *
 * @param status                 full HTTP status line returned by the first
 *                               _bulk_docs response, e.g. "HTTP/1.1 503 Service Unavailable"
 * @param expectReplicatorError  true if the status is non-recoverable, i.e.
 *                               the replication should end with lastError set;
 *                               false if the retry should succeed and the
 *                               checkpoint should advance to sequence 1
 */
public void runPushReplicationWithTransientError(String status, boolean expectReplicatorError) throws Exception {
    String doc1Id = "doc1";
    // Create mockwebserver and custom dispatcher.
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        server.play();
        // Add the single document to be pushed.
        Document doc1 = createDocumentForPushReplication(doc1Id, null, null);
        // Checkpoint GET response w/ 404 + respond to all PUT checkpoint requests.
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        mockCheckpointPut.setDelayMs(50);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing.
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // 1st _bulk_docs response -- the transient error under test.
        MockResponse response = new MockResponse().setStatus(status);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, response);
        // 2nd _bulk_docs response -- everything stored.
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // Run a one-shot push replication.
        Replication pusher = database.createPushReplication(server.getUrl("/db"));
        pusher.setContinuous(false);
        runReplication(pusher);
        if (expectReplicatorError) {
            // Non-recoverable status: the error must surface on the replication.
            assertNotNull(pusher.getLastError());
        } else {
            // Recoverable status: the retry succeeded and no error remains.
            assertNull(pusher.getLastError());
            int expectedLastSequence = 1;
            Log.d(TAG, "waiting for put checkpoint with lastSequence: %d", expectedLastSequence);
            List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, expectedLastSequence);
            Log.d(TAG, "done waiting for put checkpoint with lastSequence: %d", expectedLastSequence);
            validateCheckpointRequestsRevisions(checkpointRequests);
            // Assert our local sequence matches what is expected.
            String lastSequence = database.lastSequenceWithCheckpointId(pusher.remoteCheckpointDocID());
            assertEquals(Integer.toString(expectedLastSequence), lastSequence);
            // Assert completed count makes sense.
            assertEquals(pusher.getChangesCount(), pusher.getCompletedChangesCount());
        }
    } finally {
        // Shut down the server. Instances cannot be reused.
        server.shutdown();
    }
}
/**
 * A one-shot push replication run against a mock client that throws an
 * IOException on every request must still complete, surfacing the failure
 * through getLastError() instead of hanging.
 */
public void testOneShotReplicationErrorNotification() throws Throwable {
    int savedRetryDelay = RemoteRequestRetry.RETRY_DELAY_MS;
    // Shrink the retry delay so the retries burn through quickly.
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        final CustomizableMockHttpClient failingClient = new CustomizableMockHttpClient();
        failingClient.addResponderThrowExceptionAllRequests();
        URL remote = getReplicationURL();
        manager.setDefaultHttpClientFactory(mockFactoryFactory(failingClient));
        Replication push = database.createPushReplication(remote);
        runReplication(push);
        // Every request failed, so the replication must report an error.
        assertTrue(push.getLastError() != null);
    } finally {
        // Restore the shared retry delay for other tests.
        RemoteRequestRetry.RETRY_DELAY_MS = savedRetryDelay;
    }
}
/**
 * Verify that running a continuous push replication will emit a change while
 * in an error state when run against a mock server that throws an exception
 * on every request.
 */
public void testContinuousReplicationErrorNotification() throws Throwable {
    int previous = RemoteRequestRetry.RETRY_DELAY_MS;
    // Shrink the retry delay so retries cycle quickly during the test.
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderThrowExceptionAllRequests();
        URL remote = getReplicationURL();
        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(remote);
        pusher.setContinuous(true);
        // Replication observer: latch fires on the first error-bearing change event.
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        pusher.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                if (event.getError() != null) {
                    countDownLatch.countDown();
                }
            }
        });
        // Start replication and wait for the error notification.
        pusher.start();
        boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        stopReplication(pusher);
    } finally {
        // Restore the shared retry delay for other tests.
        RemoteRequestRetry.RETRY_DELAY_MS = previous;
    }
}
/**
 * Test for the goOffline() method: a continuous pull receives doc1, is taken
 * offline (so doc2, queued on the _changes feed afterwards, is not yet
 * pulled), then brought back online and must pick up doc2 and checkpoint at
 * its sequence.
 */
public void testGoOffline() throws Exception {
    final int numMockDocsToServe = 2;
    // Create mockwebserver and custom dispatcher.
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    try {
        server.play();
        // Mock documents to be pulled (doc2 is only served after going back online).
        MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
        mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
        mockDoc1.setAttachmentName("attachment.png");
        MockDocumentGet.MockDocument mockDoc2 = new MockDocumentGet.MockDocument("doc2", "1-563b", 2);
        mockDoc2.setJsonMap(MockHelper.generateRandomJsonMap());
        mockDoc2.setAttachmentName("attachment2.png");
        // Fake checkpoint PUT and GET response w/ 404.
        MockCheckpointPut fakeCheckpointResponse = new MockCheckpointPut();
        fakeCheckpointResponse.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // First _changes response contains only doc1.
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // Next _changes response will block (eg, longpoll response with no changes to return).
        MockChangesFeed mockChangesFeedEmpty = new MockChangesFeed();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedEmpty.generateMockResponse());
        // doc1 response.
        MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
        dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
        // doc2 response (queued now, but only requested after doc2 appears in _changes).
        mockDocumentGet = new MockDocumentGet(mockDoc2);
        dispatcher.enqueueResponse(mockDoc2.getDocPathRegex(), mockDocumentGet.generateMockResponse());
        // Create continuous pull replication.
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setContinuous(true);
        // Change listener: track IDLE transitions and completed-change count.
        final CountDownLatch idleCountdownLatch = new CountDownLatch(1);
        final CountDownLatch receivedAllDocs = new CountDownLatch(1);
        pullReplication.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                Log.e(Log.TAG_SYNC, "event.getCompletedChangeCount() = " + event.getCompletedChangeCount());
                if (event.getTransition() != null && event.getTransition().getDestination() == ReplicationState.IDLE) {
                    idleCountdownLatch.countDown();
                }
                if (event.getCompletedChangeCount() == numMockDocsToServe) {
                    receivedAllDocs.countDown();
                }
            }
        });
        // Start replication.
        pullReplication.start();
        // Wait until it goes into idle state.
        boolean success = idleCountdownLatch.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        // WORKAROUND: With CBL Java on Jenkins, Replicator becomes IDLE state before processing doc1. (NOT 100% REPRODUCIBLE)
        // NOTE: 03/20/2014 This is also observable with on Standard Android emulator with ARM. (NOT 100% REPRODUCIBLE)
        // NOTE: Build.BRAND.equalsIgnoreCase("generic") is only for Android, not for regular Java.
        // So, till solve IDLE state issue, always wait 5 seconds.
        try {
            Thread.sleep(5 * 1000);
        } catch (Exception e) {
        }
        // Put the replication offline.
        putReplicationOffline(pullReplication);
        // At this point, we shouldn't have received all of the docs yet.
        assertTrue(receivedAllDocs.getCount() > 0);
        // Return some more docs on the _changes feed (doc2).
        MockChangesFeed mockChangesFeed2 = new MockChangesFeed();
        mockChangesFeed2.add(new MockChangesFeed.MockChangedDoc(mockDoc2));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed2.generateMockResponse());
        // Put the replication online (should see the new docs).
        putReplicationOnline(pullReplication);
        // Wait until we receive all the docs.
        success = receivedAllDocs.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        // Wait until we try to PUT a checkpoint request with doc2's sequence.
        waitForPutCheckpointRequestWithSeq(dispatcher, mockDoc2.getDocSeq());
        // Make sure all docs are in the local db.
        Map<String, Object> allDocs = database.getAllDocs(new QueryOptions());
        Integer totalRows = (Integer) allDocs.get("total_rows");
        List rows = (List) allDocs.get("rows");
        assertEquals(numMockDocsToServe, totalRows.intValue());
        assertEquals(numMockDocsToServe, rows.size());
        // Cleanup.
        stopReplication(pullReplication);
    } finally {
        server.shutdown();
    }
}
/**
 * Drives the given replication into the OFFLINE state and blocks until the
 * transition is actually observed (up to 30 seconds), failing the test otherwise.
 */
private void putReplicationOffline(Replication replication) throws InterruptedException {
    Log.d(Log.TAG, "putReplicationOffline: %s", replication);
    // this was a useless test, the replication wasn't even started
    final CountDownLatch offlineSignal = new CountDownLatch(1);
    Replication.ChangeListener offlineObserver = new ReplicationOfflineObserver(offlineSignal);
    replication.addChangeListener(offlineObserver);
    replication.goOffline();
    // Wait for the state-change event rather than assuming goOffline() is synchronous.
    boolean sawOfflineTransition = offlineSignal.await(30, TimeUnit.SECONDS);
    assertTrue(sawOfflineTransition);
    // Detach the observer so it doesn't fire on later transitions in the same test.
    replication.removeChangeListener(offlineObserver);
    Log.d(Log.TAG, "/putReplicationOffline: %s", replication);
}
/**
 * Drives the given replication back into the ACTIVE (online) state and blocks
 * until the transition is observed (up to 30 seconds), failing the test otherwise.
 */
private void putReplicationOnline(Replication replication) throws InterruptedException {
    Log.d(Log.TAG, "putReplicationOnline: %s", replication);
    // this was a useless test, the replication wasn't even started
    final CountDownLatch onlineSignal = new CountDownLatch(1);
    Replication.ChangeListener onlineObserver = new ReplicationActiveObserver(onlineSignal);
    replication.addChangeListener(onlineObserver);
    replication.goOnline();
    // Wait for the state-change event rather than assuming goOnline() is synchronous.
    boolean sawOnlineTransition = onlineSignal.await(30, TimeUnit.SECONDS);
    assertTrue(sawOnlineTransition);
    // Detach the observer so it doesn't fire on later transitions in the same test.
    replication.removeChangeListener(onlineObserver);
    Log.d(Log.TAG, "/putReplicationOnline: %s", replication);
}
/**
 * Regression test: after a continuous pull replication is taken offline and then
 * goOnline() is called many times in a row, only ONE _changes longpoll request
 * (i.e. one change tracker) should be issued — redundant goOnline() calls must
 * not spawn extraneous change trackers.
 *
 * @throws Exception on test failure
 */
public void testReplicationOnlineExtraneousChangeTrackers() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    try {
        // add sticky checkpoint GET response w/ 404
        MockCheckpointGet fakeCheckpointResponse = new MockCheckpointGet();
        fakeCheckpointResponse.set404(true);
        fakeCheckpointResponse.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // add sticky _changes response to feed=longpoll that just blocks for 60 seconds to emulate
        // server that doesn't have any new changes
        MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
        mockChangesFeedNoResponse.setDelayMs(60 * 1000);
        mockChangesFeedNoResponse.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES_LONGPOLL, mockChangesFeedNoResponse);
        // add _changes response to feed=normal that returns empty _changes feed immediately
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        MockResponse mockResponse = mockChangesFeed.generateMockResponse();
        for (int i = 0; i < 500; i++) { // TODO: use setSticky instead of workaround to add a ton of mock responses
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES_NORMAL, new WrappedSmartMockResponse(mockResponse));
        }
        // start mock server
        server.play();
        //create url for replication
        URL baseUrl = server.getUrl("/db");
        //create replication
        final Replication pullReplication = database.createPullReplication(baseUrl);
        pullReplication.setContinuous(true);
        pullReplication.start();
        // wait until we get a request to the _changes feed
        RecordedRequest changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES_LONGPOLL);
        assertNotNull(changesReq);
        putReplicationOffline(pullReplication);
        // at this point since we called takeRequest earlier, our recorded _changes request queue should be empty
        assertNull(dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES_LONGPOLL));
        // put replication online 10 times
        for (int i = 0; i < 10; i++) {
            pullReplication.goOnline();
        }
        // sleep for a while to give things a chance to start
        Log.d(TAG, "sleeping for 2 seconds");
        Thread.sleep(2 * 1000);
        Log.d(TAG, "done sleeping");
        // how many _changes feed requests has the replicator made since going online?
        int numChangesRequests = 0;
        while ((changesReq = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES_LONGPOLL)) != null) {
            Log.d(TAG, "changesReq: %s", changesReq);
            numChangesRequests += 1;
        }
        // assert that there was only one _changes feed request
        assertEquals(1, numChangesRequests);
        // shutdown
        stopReplication(pullReplication);
    } finally {
        server.shutdown();
    }
}
/**
 * Test goOffline() method in the context of a continuous pusher.
 * <p/>
 * - 1. Add a local document
 * - 2. Kick off continuous push replication
 * - 3. Wait for document to be pushed
 * - 4. Call goOffline()
 * - 5. Add a 2nd local document
 * - 6. Call goOnline()
 * - 7. Wait for 2nd document to be pushed
 *
 * @throws Exception on test failure
 */
public void testGoOfflinePusher() throws Exception {
    // Shrink the retry delay so the test doesn't sit on the default backoff.
    int previous = RemoteRequestRetry.RETRY_DELAY_MS;
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        // 1. Add a local document
        Map<String, Object> properties = new HashMap<String, Object>();
        properties.put("testGoOfflinePusher", "1");
        Document doc1 = createDocumentWithProperties(database, properties);
        // create mock server
        MockWebServer server = new MockWebServer();
        try {
            MockDispatcher dispatcher = new MockDispatcher();
            server.setDispatcher(dispatcher);
            server.play();
            // checkpoint PUT response (sticky)
            MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
            mockCheckpointPut.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
            // _revs_diff response -- everything missing
            MockRevsDiff mockRevsDiff = new MockRevsDiff();
            mockRevsDiff.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
            // _bulk_docs response -- everything stored
            MockBulkDocs mockBulkDocs = new MockBulkDocs();
            mockBulkDocs.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
            // 2. Kick off continuous push replication
            Replication replicator = database.createPushReplication(server.getUrl("/db"));
            replicator.setContinuous(true);
            CountDownLatch replicationIdleSignal = new CountDownLatch(1);
            ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
            replicator.addChangeListener(replicationIdleObserver);
            replicator.start();
            // 3. Wait for document to be pushed
            // wait until replication goes idle
            boolean successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
            assertTrue(successful);
            // wait until mock server gets the checkpoint PUT request
            boolean foundCheckpointPut = false;
            String expectedLastSequence = "1";
            while (!foundCheckpointPut) {
                RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
                if (request.getMethod().equals("PUT")) {
                    foundCheckpointPut = true;
                    Assert.assertTrue(request.getUtf8Body().indexOf(expectedLastSequence) != -1);
                    // wait until mock server responds to the checkpoint PUT request
                    dispatcher.takeRecordedResponseBlocking(request);
                }
            }
            // make some assertions about the outgoing _bulk_docs requests for first doc
            RecordedRequest bulkDocsRequest1 = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
            assertNotNull(bulkDocsRequest1);
            assertBulkDocJsonContainsDoc(bulkDocsRequest1, doc1);
            // 4. Call goOffline()
            putReplicationOffline(replicator);
            // 5. Add a 2nd local document
            properties = new HashMap<String, Object>();
            properties.put("testGoOfflinePusher", "2");
            Document doc2 = createDocumentWithProperties(database, properties);
            // make sure the push replicator does not send requests while offline;
            // give it a window in which a (buggy) request could be issued.
            try {
                Thread.sleep(1000 * 3);
            } catch (InterruptedException ex) {
                // restore interrupt status instead of silently swallowing it
                Thread.currentThread().interrupt();
            }
            // make sure we did not receive _bulk_docs while offline
            RecordedRequest bulkDocsRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
            assertNull(bulkDocsRequest);
            // 6. Call goOnline()
            putReplicationOnline(replicator);
            // 7. Wait for 2nd document to be pushed:
            // wait until mock server gets the 2nd checkpoint PUT request
            foundCheckpointPut = false;
            expectedLastSequence = "2";
            while (!foundCheckpointPut) {
                RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
                if (request.getMethod().equals("PUT")) {
                    foundCheckpointPut = true;
                    Assert.assertTrue(request.getUtf8Body().indexOf(expectedLastSequence) != -1);
                    // wait until mock server responds to the checkpoint PUT request
                    dispatcher.takeRecordedResponseBlocking(request);
                }
            }
            // make some assertions about the outgoing _bulk_docs requests for second doc
            RecordedRequest bulkDocsRequest2 = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
            assertNotNull(bulkDocsRequest2);
            assertBulkDocJsonContainsDoc(bulkDocsRequest2, doc2);
            // cleanup
            stopReplication(replicator);
        } finally {
            server.shutdown();
        }
    } finally {
        RemoteRequestRetry.RETRY_DELAY_MS = previous;
    }
}
/**
 * Verify that when a replication runs into an auth error, it stops
 * and the lastError() method returns that error (an HTTP 401).
 *
 * @throws Exception on test failure
 */
public void testReplicatorErrorStatus() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // fake _session response
        MockSessionGet mockSessionGet = new MockSessionGet();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_SESSION, mockSessionGet.generateMockResponse());
        // fake _facebook response
        MockFacebookAuthPost mockFacebookAuthPost = new MockFacebookAuthPost();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_FACEBOOK_AUTH, mockFacebookAuthPost.generateMockResponse());
        // start mock server
        server.play();
        // register bogus fb token
        Authenticator facebookAuthenticator = AuthenticatorFactory.createFacebookAuthenticator("fake_access_token");
        // run pull replication (one-shot) against the mock server
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setAuthenticator(facebookAuthenticator);
        pullReplication.setContinuous(false);
        runReplication(pullReplication);
        // the replicator must have recorded the auth failure as its last error
        assertNotNull(pullReplication.getLastError());
        assertTrue(pullReplication.getLastError() instanceof HttpResponseException);
        assertEquals(401 /* unauthorized */, ((HttpResponseException) pullReplication.getLastError()).getStatusCode());
        // assert that the replicator sent the requests we expected it to send
        // (fixed local-variable typo: was "sessionReqeust")
        RecordedRequest sessionRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_SESSION);
        assertNotNull(sessionRequest);
        RecordedRequest facebookRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_FACEBOOK_AUTH);
        assertNotNull(facebookRequest);
        dispatcher.verifyAllRecordedRequestsTaken();
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that Manager.getReplicator() honors a "headers" map inside the target
 * properties (here a session cookie), that the resulting push replicator carries
 * the header, and that a subsequent lookup with "cancel" stops the running
 * replicator.
 *
 * @throws Throwable on test failure
 */
public void testGetReplicatorWithCustomHeader() throws Throwable {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // checkpoint PUT or GET response (sticky)
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        mockCheckpointPut.setDelayMs(500);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        server.play();
        Map<String, Object> properties = new HashMap<String, Object>();
        properties.put("source", DEFAULT_TEST_DB);
        // target with custom headers (cookie)
        // (fixed local-variable typo: was "coolieVal")
        Map<String, Object> headers = new HashMap<String, Object>();
        String cookieVal = "SyncGatewaySession=c38687c2696688a";
        headers.put("Cookie", cookieVal);
        Map<String, Object> targetProperties = new HashMap<String, Object>();
        targetProperties.put("url", server.getUrl("/db").toExternalForm());
        targetProperties.put("headers", headers);
        properties.put("target", targetProperties);
        Replication replicator = manager.getReplicator(properties);
        assertNotNull(replicator);
        assertEquals(server.getUrl("/db").toExternalForm(), replicator.getRemoteUrl().toExternalForm());
        assertTrue(!replicator.isPull());
        assertFalse(replicator.isContinuous());
        assertFalse(replicator.isRunning());
        // the custom Cookie header must have been propagated to the replicator
        assertTrue(replicator.getHeaders().containsKey("Cookie"));
        assertEquals(replicator.getHeaders().get("Cookie"), cookieVal);
        // add replication observer
        CountDownLatch replicationDoneSignal = new CountDownLatch(1);
        ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
        replicator.addChangeListener(replicationFinishedObserver);
        // start the replicator
        Log.d(TAG, "Starting replicator " + replicator);
        replicator.start();
        final CountDownLatch replicationStarted = new CountDownLatch(1);
        replicator.addChangeListener(new ReplicationActiveObserver(replicationStarted));
        boolean success = replicationStarted.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        // now lets lookup existing replicator and stop it
        Log.d(TAG, "Looking up replicator");
        properties.put("cancel", true);
        Replication activeReplicator = manager.getReplicator(properties);
        Log.d(TAG, "Found replicator " + activeReplicator + " and calling stop()");
        activeReplicator.stop();
        Log.d(TAG, "called stop(), waiting for it to finish");
        // wait for replication to finish
        boolean didNotTimeOut = replicationDoneSignal.await(180, TimeUnit.SECONDS);
        Log.d(TAG, "replicationDoneSignal.await done, didNotTimeOut: " + didNotTimeOut);
        assertTrue(didNotTimeOut);
        assertFalse(activeReplicator.isRunning());
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that Manager.getReplicator() builds a one-shot push replicator from a
 * source/target properties map, that it can be started, looked up again with
 * "cancel", and stopped cleanly.
 *
 * @throws Throwable on test failure
 */
public void testGetReplicator() throws Throwable {
    // Mock server with a custom dispatcher emulating Sync Gateway.
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // Sticky checkpoint PUT/GET response with a small artificial delay.
        MockCheckpointPut checkpointResponse = new MockCheckpointPut();
        checkpointResponse.setSticky(true);
        checkpointResponse.setDelayMs(500);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, checkpointResponse);
        server.play();
        // Describe the replication as a properties map: local source, remote target.
        Map<String, Object> replicationProperties = new HashMap<String, Object>();
        replicationProperties.put("source", DEFAULT_TEST_DB);
        replicationProperties.put("target", server.getUrl("/db").toExternalForm());
        Replication replicator = manager.getReplicator(replicationProperties);
        assertNotNull(replicator);
        assertEquals(server.getUrl("/db").toExternalForm(), replicator.getRemoteUrl().toExternalForm());
        // A local source means this is a push, one-shot, not-yet-running replicator.
        assertTrue(!replicator.isPull());
        assertFalse(replicator.isContinuous());
        assertFalse(replicator.isRunning());
        // Observe completion so we can wait for the stop below.
        CountDownLatch doneSignal = new CountDownLatch(1);
        ReplicationFinishedObserver finishedObserver = new ReplicationFinishedObserver(doneSignal);
        replicator.addChangeListener(finishedObserver);
        // Start it and wait until it reports the active state.
        Log.d(TAG, "Starting replicator " + replicator);
        replicator.start();
        final CountDownLatch startedSignal = new CountDownLatch(1);
        replicator.addChangeListener(new ReplicationActiveObserver(startedSignal));
        boolean started = startedSignal.await(30, TimeUnit.SECONDS);
        assertTrue(started);
        // Look the same replication up again with "cancel" set and stop it.
        Log.d(TAG, "Looking up replicator");
        replicationProperties.put("cancel", true);
        Replication runningReplicator = manager.getReplicator(replicationProperties);
        Log.d(TAG, "Found replicator " + runningReplicator + " and calling stop()");
        runningReplicator.stop();
        Log.d(TAG, "called stop(), waiting for it to finish");
        // Wait for the finished event (generous timeout).
        boolean finishedInTime = doneSignal.await(180, TimeUnit.SECONDS);
        Log.d(TAG, "replicationDoneSignal.await done, didNotTimeOut: " + finishedInTime);
        assertTrue(finishedInTime);
        assertFalse(runningReplicator.isRunning());
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that Manager.getReplicator() turns an "auth" block inside the target
 * properties into a FacebookAuthorizer on the resulting replicator.
 *
 * @throws Throwable on test failure
 */
public void testGetReplicatorWithAuth() throws Throwable {
    // Build the target descriptor: remote URL plus parsed auth credentials.
    Map<String, Object> authProperties = getReplicationAuthParsedJson();
    Map<String, Object> target = new HashMap<String, Object>();
    target.put("url", getReplicationURL().toExternalForm());
    target.put("auth", authProperties);
    // Wrap it into a full replication properties map.
    Map<String, Object> replicationProperties = new HashMap<String, Object>();
    replicationProperties.put("source", DEFAULT_TEST_DB);
    replicationProperties.put("target", target);
    Replication replicator = manager.getReplicator(replicationProperties);
    assertNotNull(replicator);
    // The auth block must have been materialized as a Facebook authenticator.
    assertNotNull(replicator.getAuthenticator());
    assertTrue(replicator.getAuthenticator() instanceof FacebookAuthorizer);
}
/**
 * When the server returns a 409 error to a PUT checkpoint response, make
 * sure it does the right thing:
 * - Pull latest remote checkpoint
 * - Try to push checkpoint again (this time passing latest rev)
 *
 * @throws Exception on test failure
 */
public void testPutCheckpoint409Recovery() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // mock documents to be pulled
        MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
        mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
        // checkpoint GET response w/ 404
        MockResponse fakeCheckpointResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeCheckpointResponse);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // _changes response
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // doc1 response
        MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
        dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
        // respond with 409 error to mock checkpoint PUT
        MockResponse checkpointResponse409 = new MockResponse();
        checkpointResponse409.setStatus("HTTP/1.1 409 CONFLICT");
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, checkpointResponse409);
        // the replicator should then try to do a checkpoint GET, and in this case
        // it should return a value with a rev id
        MockCheckpointGet mockCheckpointGet = new MockCheckpointGet();
        mockCheckpointGet.setOk("true");
        mockCheckpointGet.setRev("0-1");
        mockCheckpointGet.setLastSequence("0");
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet);
        // the replicator should then try a checkpoint PUT again
        // and we should respond with a 201
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // start mock server
        server.play();
        // run pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        // I had to set this to continuous, because in a one-shot replication it tries to
        // save the checkpoint asynchronously as the replicator is shutting down, which
        // breaks the retry logic in the case a 409 conflict is returned by server.
        pullReplication.setContinuous(true);
        pullReplication.start();
        // we should have gotten two requests to PATH_REGEX_CHECKPOINT:
        // PUT -> 409 Conflict
        // PUT -> 201 Created
        for (int i = 1; i <= 2; i++) {
            Log.v(TAG, "waiting for PUT checkpoint: %d", i);
            waitForPutCheckpointRequestWithSeq(dispatcher, mockDoc1.getDocSeq());
            Log.d(TAG, "got PUT checkpoint: %d", i);
        }
        stopReplication(pullReplication);
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that the puller honors document validators: a rejected document
 * ("doc1") must not be inserted, while other documents ("doc0") still are.
 *
 * @throws Exception on test failure
 */
public void testVerifyPullerInsertsDocsWithValidation() throws Exception {
    // create mockwebserver and custom dispatcher, preloaded with 2 docs / 2 revs each
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, 2, 2);
    try {
        server.play();
        // Setup validation to reject document with id: doc1
        database.setValidation("validateOnlyDoc1", new Validator() {
            @Override
            public void validate(Revision newRevision, ValidationContext context) {
                if ("doc1".equals(newRevision.getDocument().getId())) {
                    context.reject();
                }
            }
        });
        // run pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        runReplication(pullReplication);
        assertNotNull(database);
        // doc1 should not be in the store because of validation
        assertNull(database.getExistingDocument("doc1"));
        // doc0 is not rejected by the validator, so it should be in the store
        // (original comment claimed it "wont be because of the bug", which
        // contradicts the assertion below — presumably stale after a fix)
        assertNotNull(database.getExistingDocument("doc0"));
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that setting channels on a pull replication results in a POST to the
 * _changes feed whose body carries filter="sync_gateway/bychannel" and the
 * requested channel names.
 *
 * @throws Exception on test failure
 */
public void testChannelsFilter() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // checkpoint PUT or GET response (sticky)
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _changes response (empty feed)
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // start mock server
        server.play();
        // run pull replication restricted to two channels
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setChannels(Arrays.asList("foo", "bar"));
        runReplication(pullReplication);
        // make assertions about outgoing requests from replicator -> mock
        RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
        assertTrue(getChangesFeedRequest.getMethod().equals("POST"));
        String body = getChangesFeedRequest.getUtf8Body();
        Map<String, Object> jsonMap = Manager.getObjectMapper().readValue(body, Map.class);
        // the channels must have been translated into the bychannel filter
        assertTrue(jsonMap.containsKey("filter"));
        String filter = (String) jsonMap.get("filter");
        assertEquals("sync_gateway/bychannel", filter);
        assertTrue(jsonMap.containsKey("channels"));
        String channels = (String) jsonMap.get("channels");
        assertTrue(channels.contains("foo"));
        assertTrue(channels.contains("bar"));
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that a continuous pull replication against a server with no new
 * changes transitions into the IDLE state.
 *
 * @throws Exception on test failure
 */
public void testContinuousPullEntersIdleState() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // checkpoint GET response w/ 404
        MockResponse fakeCheckpointResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeCheckpointResponse);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // add non-sticky changes response that returns no changes
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // add sticky _changes response that just blocks for 60 seconds to emulate
        // server that doesn't have any new changes
        MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
        mockChangesFeedNoResponse.setDelayMs(60 * 1000);
        mockChangesFeedNoResponse.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse);
        server.play();
        // create pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setContinuous(true);
        // listener that trips the latch the first time the replicator reports IDLE
        final CountDownLatch enteredIdleState = new CountDownLatch(1);
        pullReplication.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
                    enteredIdleState.countDown();
                }
            }
        });
        // start pull replication
        pullReplication.start();
        boolean success = enteredIdleState.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        Log.d(TAG, "Got IDLE event, stopping replication");
        stopReplication(pullReplication);
    } finally {
        server.shutdown();
    }
}
/**
 * Runs {@link #mockPullBulkDocs} against the SYNC_GW server type.
 * NOTE(review): the "failing" prefix keeps the JUnit 3 runner (which only picks
 * up methods starting with "test") from executing it — presumably because it is
 * a known-failing scenario kept for manual runs; confirm before renaming.
 */
public void failingTestMockPullBulkDocsSyncGw() throws Exception {
    mockPullBulkDocs(MockDispatcher.ServerType.SYNC_GW);
}
/**
 * Pulls a batch of mock documents and verifies that the puller batches its
 * _bulk_get requests close to INBOX_CAPACITY documents per request (except for
 * the first and last request of the run).
 *
 * @param serverType the mock server flavor to emulate (CouchDB vs. Sync Gateway)
 * @throws Exception on test failure
 */
public void mockPullBulkDocs(MockDispatcher.ServerType serverType) throws Exception {
    // set INBOX_CAPACITY to a smaller value so that processing times don't skew the test
    int defaultCapacity = ReplicationInternal.INBOX_CAPACITY;
    ReplicationInternal.INBOX_CAPACITY = 10;
    int defaultDelay = ReplicationInternal.PROCESSOR_DELAY;
    ReplicationInternal.PROCESSOR_DELAY = ReplicationInternal.PROCESSOR_DELAY * 10;
    // serve 25 mock docs (2.5x the inbox capacity, so multiple batches are needed)
    int numMockDocsToServe = (ReplicationInternal.INBOX_CAPACITY * 2) + (ReplicationInternal.INBOX_CAPACITY / 2);
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(serverType);
    try {
        // mock documents to be pulled
        List<MockDocumentGet.MockDocument> mockDocs = MockHelper.getMockDocuments(numMockDocsToServe);
        // respond to all GET (responds with 404) and PUT Checkpoint requests
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _changes response listing every mock doc
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        for (MockDocumentGet.MockDocument mockDocument : mockDocs) {
            mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument));
        }
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // individual doc responses (expecting it to call _bulk_docs, but just in case)
        for (MockDocumentGet.MockDocument mockDocument : mockDocs) {
            MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDocument);
            dispatcher.enqueueResponse(mockDocument.getDocPathRegex(), mockDocumentGet.generateMockResponse());
        }
        // _bulk_get response serving all docs (sticky, so it answers every batch)
        MockDocumentBulkGet mockBulkGet = new MockDocumentBulkGet();
        for (MockDocumentGet.MockDocument mockDocument : mockDocs) {
            mockBulkGet.addDocument(mockDocument);
        }
        mockBulkGet.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_GET, mockBulkGet);
        // start mock server
        server.play();
        // run pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        runReplication(pullReplication, 3 * 60);
        assertTrue(pullReplication.getLastError() == null);
        // wait until it pushes checkpoint of last doc
        MockDocumentGet.MockDocument lastDoc = mockDocs.get(mockDocs.size() - 1);
        waitForPutCheckpointRequestWithSequence(dispatcher, lastDoc.getDocSeq());
        // dump out the outgoing requests for bulk docs
        BlockingQueue<RecordedRequest> bulkGetRequests = dispatcher.getRequestQueueSnapshot(MockHelper.PATH_REGEX_BULK_GET);
        Iterator<RecordedRequest> iterator = bulkGetRequests.iterator();
        boolean first = true;
        while (iterator.hasNext()) {
            RecordedRequest request = iterator.next();
            byte[] body = MockHelper.getUncompressedBody(request);
            Map<String, Object> jsonMap = MockHelper.getJsonMapFromRequest(body);
            List docs = (List) jsonMap.get("docs");
            Log.w(TAG, "bulk get request: %s had %d docs", request, docs.size());
            // except the first one and last one, docs.size() should be (nearly) equal to INBOX_CAPACITY.
            if (iterator.hasNext() && !first) {
                // the bulk docs requests except for the last one should have max number of docs
                // relax this a bit, so that it at least has to have greater than or equal to half max number of docs
                assertTrue(docs.size() >= (ReplicationInternal.INBOX_CAPACITY / 2));
                if (docs.size() != ReplicationInternal.INBOX_CAPACITY) {
                    Log.w(TAG, "docs.size() %d != ReplicationInternal.INBOX_CAPACITY %d", docs.size(), ReplicationInternal.INBOX_CAPACITY);
                }
            }
            first = false;
        }
    } finally {
        // always restore the tuning knobs so other tests see the defaults
        ReplicationInternal.INBOX_CAPACITY = defaultCapacity;
        ReplicationInternal.PROCESSOR_DELAY = defaultDelay;
        server.shutdown();
    }
}
/**
 * Verify the _session lookup fallback: when GET /db/_session returns 404,
 * the replicator must retry at the server root (GET /_session).
 *
 * @throws Exception on test failure
 */
public void testCheckSessionAtPath() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    try {
        // session GET response w/ 404 to /db/_session
        MockResponse fakeSessionResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeSessionResponse);
        WrappedSmartMockResponse wrappedSmartMockResponse = new WrappedSmartMockResponse(fakeSessionResponse);
        wrappedSmartMockResponse.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_SESSION, wrappedSmartMockResponse);
        // session GET response w/ 200 OK to /_session
        MockResponse fakeSessionResponse2 = new MockResponse();
        Map<String, Object> responseJson = new HashMap<String, Object>();
        Map<String, Object> userCtx = new HashMap<String, Object>();
        userCtx.put("name", "foo");
        responseJson.put("userCtx", userCtx);
        fakeSessionResponse2.setBody(Manager.getObjectMapper().writeValueAsBytes(responseJson));
        MockHelper.set200OKJson(fakeSessionResponse2);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_SESSION_COUCHDB, fakeSessionResponse2);
        // respond to all GET/PUT Checkpoint requests
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // start mock server
        server.play();
        // run pull replication with an authenticator so a session check is triggered
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setAuthenticator(new FacebookAuthorizer("justinbieber@glam.co"));
        CountDownLatch replicationDoneSignal = new CountDownLatch(1);
        ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
        pullReplication.addChangeListener(replicationFinishedObserver);
        pullReplication.start();
        // it should first try /db/_session
        dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_SESSION);
        // and then it should fallback to /_session
        dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_SESSION_COUCHDB);
        boolean success = replicationDoneSignal.await(30, TimeUnit.SECONDS);
        Assert.assertTrue(success);
    } finally {
        server.shutdown();
    }
}
/**
 * Verify that a failing _changes feed (sticky 404) surfaces an error both via
 * a change-event callback (event.getError()) and via getLastError() after the
 * replication finishes.
 *
 * @throws Exception on test failure
 */
public void testChangeTrackerError() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // 404 response to _changes feed (sticky)
        MockResponse mockChangesFeed = new MockResponse();
        MockHelper.set404NotFoundJson(mockChangesFeed);
        WrappedSmartMockResponse wrapped = new WrappedSmartMockResponse(mockChangesFeed);
        wrapped.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, wrapped);
        // start mock server
        server.play();
        // run pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        // latch trips on the first change event that carries a non-null error
        final CountDownLatch changeEventError = new CountDownLatch(1);
        pullReplication.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                if (event.getError() != null) {
                    changeEventError.countDown();
                }
            }
        });
        runReplication(pullReplication);
        Assert.assertTrue(pullReplication.getLastError() != null);
        boolean success = changeEventError.await(5, TimeUnit.SECONDS);
        Assert.assertTrue(success);
    } finally {
        server.shutdown();
    }
}
// NOTE(review): removed a stray duplicate method header
// ("public void testContinuousPushReplicationGoesIdleTwice() throws Exception {")
// that sat directly above the following method declaration. A method cannot be
// declared inside another method header, so the file could not compile; the body
// that follows belongs to failingTestContinuousPushReplicationGoesIdleTooSoon.
// If a "GoesIdleTwice" test is intended, it needs its own complete body.
public void failingTestContinuousPushReplicationGoesIdleTooSoon() throws Exception {
// smaller batch size so there are multiple requests to _bulk_docs
int previous = ReplicationInternal.INBOX_CAPACITY;
ReplicationInternal.INBOX_CAPACITY = 5;
int numDocs = ReplicationInternal.INBOX_CAPACITY * 5;
// make sure we are starting empty
assertEquals(0, database.getLastSequenceNumber());
// Add doc(s)
// NOTE: more documents causes more HTTP calls. It could be more than 4 times...
for (int i = 1; i <= numDocs; i++) {
Map<String, Object> properties = new HashMap<String, Object>();
properties.put("doc" + String.valueOf(i), "testContinuousPushReplicationGoesIdleTooSoon " + String.valueOf(i));
final Document doc = createDocWithProperties(properties);
}
// Setup MockWebServer
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
final MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// checkpoint GET response w/ 404. also receives checkpoint PUT's
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
mockCheckpointPut.setDelayMs(500);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// _revs_diff response -- everything missing
MockRevsDiff mockRevsDiff = new MockRevsDiff();
mockRevsDiff.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
// _bulk_docs response -- everything stored
MockBulkDocs mockBulkDocs = new MockBulkDocs();
mockBulkDocs.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
server.play();
// Create replicator
Replication replication = database.createPushReplication(server.getUrl("/db"));
replication.setContinuous(true);
// special change listener for this test case.
class ReplicationTransitionToIdleObserver implements Replication.ChangeListener {
private CountDownLatch enterIdleStateSignal;
public ReplicationTransitionToIdleObserver(CountDownLatch enterIdleStateSignal) {
this.enterIdleStateSignal = enterIdleStateSignal;
}
public void changed(Replication.ChangeEvent event) {
Log.w(Log.TAG_SYNC, "[ChangeListener.changed()] event => " + event.toString());
if (event.getTransition() != null) {
if (event.getTransition().getSource() != event.getTransition().getDestination() &&
event.getTransition().getDestination() == ReplicationState.IDLE) {
Log.w(Log.TAG_SYNC, "[ChangeListener.changed()] Transition to IDLE");
Log.w(Log.TAG_SYNC, "[ChangeListener.changed()] Request Count => " + server.getRequestCount());
this.enterIdleStateSignal.countDown();
}
}
}
}
CountDownLatch enterIdleStateSignal = new CountDownLatch(1);
ReplicationTransitionToIdleObserver replicationTransitionToIdleObserver = new ReplicationTransitionToIdleObserver(enterIdleStateSignal);
replication.addChangeListener(replicationTransitionToIdleObserver);
replication.start();
// Wait until idle (make sure replicator becomes IDLE state from other state)
boolean success = enterIdleStateSignal.await(20, TimeUnit.SECONDS);
assertTrue(success);
// Once the replicator is idle get a snapshot of all the requests its made to _bulk_docs endpoint
int numDocsPushed = 0;
BlockingQueue<RecordedRequest> requests = dispatcher.getRequestQueueSnapshot(MockHelper.PATH_REGEX_BULK_DOCS);
for (RecordedRequest request : requests) {
Log.i(Log.TAG_SYNC, "request: %s", request);
byte[] body = MockHelper.getUncompressedBody(request);
Map<String, Object> jsonMap = MockHelper.getJsonMapFromRequest(body);
List docs = (List) jsonMap.get("docs");
numDocsPushed += docs.size();
}
// WORKAROUND: CBL Java Unit Test on Jenkins rarely fails following.
// It seems threading issue exists, and replicator becomes IDLE even tasks in batcher.
if (System.getProperty("java.vm.name").equalsIgnoreCase("Dalvik")) {
// Assert that all docs have already been pushed by the time it goes IDLE
assertEquals(numDocs, numDocsPushed);
}
// Stop replicator and MockWebServer
stopReplication(replication);
// wait until checkpoint is pushed, since it can happen _after_ replication is finished.
// if this isn't done, there can be IOExceptions when calling server.shutdown()
waitForPutCheckpointRequestWithSeq(dispatcher, (int) database.getLastSequenceNumber());
} finally {
server.shutdown();
ReplicationInternal.INBOX_CAPACITY = previous;
}
}
/**
 * Verifies that when a continuous push replication retries after repeated
 * _bulk_docs failures (sticky 503), every outer retry cycle re-validates the
 * session (GET /_session, POST /_facebook) and re-reads the checkpoint before
 * re-attempting the push.
 *
 * Retry tuning statics are shrunk for speed and restored in the outer finally
 * so other tests are unaffected.
 */
public void testCheckSessionAndCheckpointWhenRetryingReplication() throws Exception {
// Save retry tuning so it can be restored after the test.
int prev_RETRY_DELAY_MS = RemoteRequestRetry.RETRY_DELAY_MS;
int prev_RETRY_DELAY_SECONDS = ReplicationInternal.RETRY_DELAY_SECONDS;
int prev_MAX_RETRIES = ReplicationInternal.MAX_RETRIES;
try {
RemoteRequestRetry.RETRY_DELAY_MS = 5; // speed up test execution (inner loop retry delay)
ReplicationInternal.RETRY_DELAY_SECONDS = 1; // speed up test execution (outer loop retry delay)
ReplicationInternal.MAX_RETRIES = 3; // speed up test execution (outer loop retry count)
String fakeEmail = "myfacebook@gmail.com";
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// set up request
{
// response for /db/_session
MockSessionGet mockSessionGet = new MockSessionGet();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_SESSION, mockSessionGet.generateMockResponse());
// response for /db/_facebook
MockFacebookAuthPost mockFacebookAuthPost = new MockFacebookAuthPost();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_FACEBOOK_AUTH, mockFacebookAuthPost.generateMockResponseForSuccess(fakeEmail));
// response for /db/_local/.*
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
mockCheckpointPut.setDelayMs(500);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// response for /db/_revs_diff
MockRevsDiff mockRevsDiff = new MockRevsDiff();
mockRevsDiff.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
// response for /db/_bulk_docs -- 503 errors (sticky), which forces retries
MockResponse mockResponse = new MockResponse().setResponseCode(503);
WrappedSmartMockResponse mockBulkDocs = new WrappedSmartMockResponse(mockResponse, false);
mockBulkDocs.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
}
server.play();
// register bogus fb token
Authenticator facebookAuthenticator = AuthenticatorFactory.createFacebookAuthenticator("fake_access_token");
// create replication
Replication replication = database.createPushReplication(server.getUrl("/db"));
replication.setAuthenticator(facebookAuthenticator);
replication.setContinuous(true);
CountDownLatch replicationIdle = new CountDownLatch(1);
ReplicationIdleObserver idleObserver = new ReplicationIdleObserver(replicationIdle);
replication.addChangeListener(idleObserver);
replication.start();
// wait until idle
boolean success = replicationIdle.await(30, TimeUnit.SECONDS);
assertTrue(success);
replication.removeChangeListener(idleObserver);
// create a doc in local db; this wakes the continuous replication and
// triggers the push (and thus the failing _bulk_docs sequence below)
Document doc1 = createDocumentForPushReplication("doc1", null, null);
// initial request
{
// check /db/_session
RecordedRequest sessionRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_SESSION);
assertNotNull(sessionRequest);
dispatcher.takeRecordedResponseBlocking(sessionRequest);
// check /db/_facebook
RecordedRequest facebookSessionRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_FACEBOOK_AUTH);
assertNotNull(facebookSessionRequest);
dispatcher.takeRecordedResponseBlocking(facebookSessionRequest);
// check /db/_local/.*
RecordedRequest checkPointRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
assertNotNull(checkPointRequest);
dispatcher.takeRecordedResponseBlocking(checkPointRequest);
// check /db/_revs_diff
RecordedRequest revsDiffRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_REVS_DIFF);
assertNotNull(revsDiffRequest);
dispatcher.takeRecordedResponseBlocking(revsDiffRequest);
// we should expect to at least see numAttempts attempts at doing POST to _bulk_docs
// 1st attempt
// numAttempts are number of times retry in 1 attempt.
int numAttempts = RemoteRequestRetry.MAX_RETRIES + 1; // total number of attempts = 4 (1 initial + MAX_RETRIES)
for (int i = 0; i < numAttempts; i++) {
RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
assertNotNull(request);
dispatcher.takeRecordedResponseBlocking(request);
}
}
// To test following, requires to fix #299 (improve retry behavior)
// Retry requests
// outer retry loop: each cycle must re-check session, auth, and checkpoint
for (int j = 0; j < ReplicationInternal.MAX_RETRIES; j++) {
// MockSessionGet does not support isSticky, so re-enqueue per cycle
MockSessionGet mockSessionGet = new MockSessionGet();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_SESSION, mockSessionGet.generateMockResponse());
// MockFacebookAuthPost does not support isSticky, so re-enqueue per cycle
MockFacebookAuthPost mockFacebookAuthPost = new MockFacebookAuthPost();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_FACEBOOK_AUTH, mockFacebookAuthPost.generateMockResponseForSuccess(fakeEmail));
// check /db/_session
RecordedRequest sessionRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_SESSION);
assertNotNull(sessionRequest);
dispatcher.takeRecordedResponseBlocking(sessionRequest);
// check /db/_facebook
RecordedRequest facebookSessionRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_FACEBOOK_AUTH);
assertNotNull(facebookSessionRequest);
dispatcher.takeRecordedResponseBlocking(facebookSessionRequest);
// check /db/_local/.*
RecordedRequest checkPointRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
assertNotNull(checkPointRequest);
dispatcher.takeRecordedResponseBlocking(checkPointRequest);
// check /db/_revs_diff
RecordedRequest revsDiffRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_REVS_DIFF);
assertNotNull(revsDiffRequest);
dispatcher.takeRecordedResponseBlocking(revsDiffRequest);
// we should expect to at least see numAttempts attempts at doing POST to _bulk_docs
// 1st attempt
// numAttempts are number of times retry in 1 attempt.
int numAttempts = RemoteRequestRetry.MAX_RETRIES + 1; // total number of attempts = 4 (1 initial + MAX_RETRIES)
for (int i = 0; i < numAttempts; i++) {
RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
assertNotNull(request);
dispatcher.takeRecordedResponseBlocking(request);
}
}
stopReplication(replication);
} finally {
server.shutdown();
}
} finally {
// restore retry tuning for subsequent tests
RemoteRequestRetry.RETRY_DELAY_MS = prev_RETRY_DELAY_MS;
ReplicationInternal.RETRY_DELAY_SECONDS = prev_RETRY_DELAY_SECONDS;
ReplicationInternal.MAX_RETRIES = prev_MAX_RETRIES;
}
}
/**
 * When a push replication hits a permanent (non-transient) error -- a 400
 * from _bulk_docs -- the replicator must not retry the request and must still
 * be able to stop cleanly. Prefixed "failingTest": without the fix for CBL
 * Java Core #352 the final await hangs.
 *
 * Fixes over the original: the retry tuning statics are saved and restored,
 * and server.shutdown() runs in a finally block, so a failure no longer leaks
 * sped-up retry settings or a live mock server into subsequent tests
 * (matching the pattern used by testCheckSessionAndCheckpointWhenRetryingReplication).
 */
public void failingTestStopReplicatorWhenRetryingReplicationWithPermanentError() throws Exception {
    // Save retry tuning so it can be restored after the test.
    int prevRetryDelayMs = RemoteRequestRetry.RETRY_DELAY_MS;
    int prevRetryDelaySeconds = ReplicationInternal.RETRY_DELAY_SECONDS;
    int prevMaxRetries = ReplicationInternal.MAX_RETRIES;
    RemoteRequestRetry.RETRY_DELAY_MS = 5; // speed up test execution (inner loop retry delay)
    ReplicationInternal.RETRY_DELAY_SECONDS = 1; // speed up test execution (outer loop retry delay)
    ReplicationInternal.MAX_RETRIES = 3; // speed up test execution (outer loop retry count)

    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // set up request
        {
            // response for /db/_local/.* -- checkpoint GET/PUT (sticky)
            MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
            mockCheckpointPut.setSticky(true);
            mockCheckpointPut.setDelayMs(500);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
            // response for /db/_revs_diff -- report everything missing (sticky)
            MockRevsDiff mockRevsDiff = new MockRevsDiff();
            mockRevsDiff.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
            // response for /db/_bulk_docs -- 400 Bad Request (not a transient error)
            MockResponse mockResponse = new MockResponse().setResponseCode(400);
            WrappedSmartMockResponse mockBulkDocs = new WrappedSmartMockResponse(mockResponse, false);
            mockBulkDocs.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        }
        server.play();

        // create continuous push replication
        Replication replication = database.createPushReplication(server.getUrl("/db"));
        replication.setContinuous(true);

        // observer for the IDLE state
        CountDownLatch replicationIdle = new CountDownLatch(1);
        ReplicationIdleObserver idleObserver = new ReplicationIdleObserver(replicationIdle);
        replication.addChangeListener(idleObserver);

        // observer for the finished (stopped) state
        CountDownLatch replicationDoneSignal = new CountDownLatch(1);
        ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
        replication.addChangeListener(replicationFinishedObserver);

        replication.start();

        // wait until idle
        boolean success = replicationIdle.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        replication.removeChangeListener(idleObserver);

        // create a doc in local db; this wakes the continuous replication up
        Document doc1 = createDocumentForPushReplication("doc1", null, null);

        // initial request sequence
        {
            // check /db/_local/.*
            RecordedRequest checkPointRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
            assertNotNull(checkPointRequest);
            dispatcher.takeRecordedResponseBlocking(checkPointRequest);
            // check /db/_revs_diff
            RecordedRequest revsDiffRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_REVS_DIFF);
            assertNotNull(revsDiffRequest);
            dispatcher.takeRecordedResponseBlocking(revsDiffRequest);
            // expect exactly one POST to _bulk_docs: a 400 is permanent, so no retry
            RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
            assertNotNull(request);
            dispatcher.takeRecordedResponseBlocking(request);
        }

        // Without fixing CBL Java Core #352, the following await hangs.
        // wait for replication to finish
        boolean didNotTimeOut = replicationDoneSignal.await(180, TimeUnit.SECONDS);
        Log.d(TAG, "replicationDoneSignal.await done, didNotTimeOut: " + didNotTimeOut);
        assertFalse(replication.isRunning());
    } finally {
        // always shut the mock server down and restore the retry tuning
        server.shutdown();
        RemoteRequestRetry.RETRY_DELAY_MS = prevRetryDelayMs;
        ReplicationInternal.RETRY_DELAY_SECONDS = prevRetryDelaySeconds;
        ReplicationInternal.MAX_RETRIES = prevMaxRetries;
    }
}
/**
 * Restarting a continuous push replication must preserve all configured
 * values: filter name and params, doc IDs, authenticator, custom request
 * headers, the continuous flag, and createTarget.
 */
public void testReplicationRestartPreservesValues() throws Exception {
// make sure we are starting empty
assertEquals(0, database.getLastSequenceNumber());
// add docs
Map<String, Object> properties1 = new HashMap<String, Object>();
properties1.put("doc1", "testContinuousPushReplicationGoesIdle");
final Document doc1 = createDocWithProperties(properties1);
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// NOTE(review): unlike sibling tests, play() runs before the responses are
// enqueued below; MockDispatcher appears to tolerate this -- confirm.
server.play();
// checkpoint GET response w/ 404. also receives checkpoint PUT's
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// _revs_diff response -- everything missing
MockRevsDiff mockRevsDiff = new MockRevsDiff();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
// _bulk_docs response -- everything stored
MockBulkDocs mockBulkDocs = new MockBulkDocs();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
// create continuos replication
Replication pusher = database.createPushReplication(server.getUrl("/db"));
pusher.setContinuous(true);
// add filter properties to the replicator
String filterName = "app/clientIdAndTablesSchemeDocIdFilter";
pusher.setFilter(filterName);
Map<String, Object> filterParams = new HashMap<String, Object>();
String filterParam = "tablesSchemeDocId";
String filterVal = "foo";
filterParams.put(filterParam, filterVal);
pusher.setFilterParams(filterParams);
// doc ids
pusher.setDocIds(Arrays.asList(doc1.getId()));
// custom authenticator
BasicAuthenticator authenticator = new BasicAuthenticator("foo", "bar");
pusher.setAuthenticator(authenticator);
// custom request headers
Map<String, Object> requestHeaders = new HashMap<String, Object>();
requestHeaders.put("foo", "bar");
pusher.setHeaders(requestHeaders);
// create target
pusher.setCreateTarget(true);
// start the continuous replication
CountDownLatch replicationIdleSignal = new CountDownLatch(1);
ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
pusher.addChangeListener(replicationIdleObserver);
pusher.start();
// wait until we get an IDLE event
boolean successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
assertTrue(successful);
// restart the replication
CountDownLatch replicationIdleSignal2 = new CountDownLatch(1);
ReplicationIdleObserver replicationIdleObserver2 = new ReplicationIdleObserver(replicationIdleSignal2);
pusher.addChangeListener(replicationIdleObserver2);
pusher.restart();
// wait until we get another IDLE event
successful = replicationIdleSignal2.await(30, TimeUnit.SECONDS);
assertTrue(successful);
// verify the restarted replication still has the values we set up earlier
assertEquals(filterName, pusher.getFilter());
assertTrue(pusher.getFilterParams().size() == 1);
assertEquals(filterVal, pusher.getFilterParams().get(filterParam));
assertTrue(pusher.isContinuous());
assertEquals(Arrays.asList(doc1.getId()), pusher.getDocIds());
assertEquals(authenticator, pusher.getAuthenticator());
assertEquals(requestHeaders, pusher.getHeaders());
assertTrue(pusher.shouldCreateTarget());
} finally {
server.shutdown();
}
}
/**
 * Regression test: a continuous pull replication that has gone IDLE must go
 * RUNNING again when new changes arrive and then return to IDLE a second
 * time. Before the fix, the replicator never re-entered IDLE, so the final
 * await timed out. The _changes responses are enqueued in the exact order
 * the replicator will consume them -- do not reorder.
 */
public void testContinuousPullReplicationGoesIdleTwice() throws Exception {
Log.e(TAG, "TEST START");
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// checkpoint PUT or GET response (sticky)
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// add non-sticky changes response that returns no changes
// this will cause the pull replicator to go into the IDLE state
MockChangesFeed mockChangesFeed = new MockChangesFeed();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// add _changes response that just blocks for a few seconds to emulate
// server that doesn't have any new changes. while the puller is blocked on this request
// to the _changes feed, the test will add a new changes listener that waits until it goes
// into the RUNNING state
MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
// It seems 5 sec delay might not be necessary. It reduce test duration 5 sec
//mockChangesFeedNoResponse.setDelayMs(5 * 1000);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse);
// after the above changes feed response returns after 5 seconds, the next time
// the puller gets the _changes feed, return a response that there is 1 new doc.
// this will cause the puller to go from IDLE -> RUNNING
MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// at this point, the mock _changes feed is done simulating new docs on the sync gateway
// since we've done enough to reproduce the problem. so at this point, just make the changes
// feed block for a long time.
MockChangesFeedNoResponse mockChangesFeedNoResponse2 = new MockChangesFeedNoResponse();
mockChangesFeedNoResponse2.setDelayMs(6000 * 1000); // block for > 1hr
mockChangesFeedNoResponse2.setSticky(true); // continue this behavior indefinitely
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse2);
// doc1 response
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// _revs_diff response -- everything missing
MockRevsDiff mockRevsDiff = new MockRevsDiff();
mockRevsDiff.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
Log.e(TAG, "SERVER START");
server.play();
// create pull replication
final Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
// first listener: fires once on the initial IDLE, then removes itself so it
// cannot interfere with the later IDLE/RUNNING bookkeeping
final CountDownLatch enteredIdleState1 = new CountDownLatch(1);
pullReplication.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
Log.e(TAG, "Replication is IDLE 1");
enteredIdleState1.countDown();
pullReplication.removeChangeListener(this);
}
}
});
Log.e(TAG, "REPLICATOR START");
// 1. start pull replication
pullReplication.start();
// 2. wait until its IDLE
boolean success = enteredIdleState1.await(30, TimeUnit.SECONDS);
assertTrue(success);
// 3. see server side preparation
// change listener to see if its RUNNING
// we can't add this earlier, because the countdown latch would get
// triggered too early (the other approach would be to set the countdown
// latch to a higher number)
final CountDownLatch enteredRunningState = new CountDownLatch(1);
final CountDownLatch enteredIdleState2 = new CountDownLatch(1);
pullReplication.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_ACTIVE) {
// guard so the latch is only logged/counted once
if (enteredRunningState.getCount() > 0) {
Log.e(TAG, "Replication is RUNNING");
enteredRunningState.countDown();
}
}
// second IDLE change listener
// handling IDLE event here. It seems IDLE event was fired before set IDLE event handler
else if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
// only count IDLE once RUNNING has been observed (IDLE must follow RUNNING)
if (enteredRunningState.getCount() <= 0 && enteredIdleState2.getCount() > 0) {
Log.e(TAG, "Replication is IDLE 2");
enteredIdleState2.countDown();
}
}
}
});
// 4. wait until its RUNNING
Log.e(TAG, "WAIT for RUNNING");
success = enteredRunningState.await(30, TimeUnit.SECONDS);
assertTrue(success);
// 5. wait until its IDLE again. before the fix, it would never go IDLE again, and so
// this would timeout and the test would fail.
Log.e(TAG, "WAIT for IDLE");
success = enteredIdleState2.await(30, TimeUnit.SECONDS);
assertTrue(success);
Log.e(TAG, "STOP REPLICATOR");
// clean up
stopReplication(pullReplication);
Log.e(TAG, "STOP MOCK SERVER");
} finally {
server.shutdown();
}
Log.e(TAG, "TEST DONE");
}
/**
 * A continuous pull replication must deliver the STOPPED notification exactly
 * once when stopped. The stopped latch is initialized to 2; the await is
 * expected to time out (assertFalse) with the count still at 1, which proves
 * exactly one STOPPED notification arrived.
 */
public void testContinuousPullReplicationSendStoppedOnce() throws Exception {
Log.d(TAG, "TEST START");
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// checkpoint PUT or GET response (sticky)
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// add non-sticky changes response that returns no changes
// this will cause the pull replicator to go into the IDLE state
MockChangesFeed mockChangesFeed = new MockChangesFeed();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
server.play();
// create pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
final CountDownLatch enteredIdleState = new CountDownLatch(1);
// latch of 2: a second STOPPED notification would drive the count to 0
final CountDownLatch enteredStoppedState = new CountDownLatch(2);
pullReplication.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
Log.d(TAG, "Replication is IDLE");
enteredIdleState.countDown();
} else if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_STOPPED) {
Log.d(TAG, "Replication is STOPPED");
enteredStoppedState.countDown();
}
}
});
// 1. start pull replication
pullReplication.start();
// 2. wait until its IDLE
boolean success = enteredIdleState.await(30, TimeUnit.SECONDS);
assertTrue(success);
// 3. stop pull replication
stopReplication(pullReplication);
// 4. wait for STOPPED notifications; this await is EXPECTED to time out
Log.d(TAG, "WAIT for STOPPED");
//success = enteredStoppedState.await(Replication.DEFAULT_MAX_TIMEOUT_FOR_SHUTDOWN + 30, TimeUnit.SECONDS); // replicator maximum shutdown timeout 60 sec + additional 30 sec for other stuff
// NOTE: 90 sec is too long for unit test. chnaged to 30 sec
// NOTE2: 30 sec is still too long for unit test. changed to 15sec.
success = enteredStoppedState.await(15, TimeUnit.SECONDS); // replicator maximum shutdown timeout 60 sec + additional 30 sec for other stuff
// if STOPPED notification was sent twice, enteredStoppedState becomes 0.
assertEquals(1, enteredStoppedState.getCount());
assertFalse(success);
} finally {
Log.d(TAG, "STOP MOCK SERVER");
server.shutdown();
}
Log.d(TAG, "TEST DONE");
}
/**
 * A one-shot (non-continuous) pull replication must emit the STOPPED
 * notification exactly once. The latch starts at 2: a duplicate notification
 * would drive it to 0, so the await is expected to time out (assertFalse)
 * with the count still at exactly 1.
 */
public void testOneTimePullReplicationSendStoppedOnce() throws Exception {
    Log.d(TAG, "TEST START");
    // Mock Sync Gateway.
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        // Sticky checkpoint PUT/GET handler.
        MockCheckpointPut checkpointResponse = new MockCheckpointPut();
        checkpointResponse.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, checkpointResponse);

        // One empty, non-sticky _changes response; with nothing to pull,
        // the one-shot replication finishes immediately.
        MockChangesFeed emptyChangesFeed = new MockChangesFeed();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, emptyChangesFeed.generateMockResponse());

        server.play();

        // One-shot pull replication.
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setContinuous(false);

        // Count STOPPED notifications (must end at exactly one).
        final CountDownLatch enteredStoppedState = new CountDownLatch(2);
        pullReplication.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_STOPPED &&
                        event.getTransition().getDestination() == ReplicationState.STOPPED) {
                    Log.d(TAG, "Replication is STOPPED");
                    enteredStoppedState.countDown();
                }
            }
        });

        // Start and wait for STOPPED; the await must NOT be satisfied,
        // because only a single STOPPED notification should arrive.
        pullReplication.start();
        Log.d(TAG, "WAIT for STOPPED");
        boolean latchReachedZero = enteredStoppedState.await(15, TimeUnit.SECONDS);
        // A second STOPPED notification would have driven the latch to 0.
        assertEquals(1, enteredStoppedState.getCount());
        assertFalse(latchReachedZero);
    } finally {
        Log.d(TAG, "STOP MOCK SERVER");
        server.shutdown();
    }
    Log.d(TAG, "TEST DONE");
}
/**
 * After pulling a change and saving the checkpoint, a continuous pull
 * replication must notify IDLE again: the observed sequence is
 * IDLE -> ACTIVE (new change pulled) -> IDLE (after checkpoint).
 * (Method-name typo "Replicaton" is kept: renaming would change the test id.)
 */
public void testPullReplicatonSendIdleStateAfterCheckPoint() throws Exception {
Log.d(TAG, "TEST START");
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// checkpoint PUT or GET response (sticky) (for both push and pull)
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// add non-sticky changes response that returns no changes (for pull)
// this will cause the pull replicator to go into the IDLE state
MockChangesFeed mockChangesFeedEmpty = new MockChangesFeed();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedEmpty.generateMockResponse());
// start mock server
server.play();
// create pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
// handler to wait for IDLE
final CountDownLatch pullInitialIdleState = new CountDownLatch(1);
pullReplication.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
pullInitialIdleState.countDown();
}
}
});
// start pull replication
pullReplication.start();
// 1. Wait till replicator becomes IDLE
boolean success = pullInitialIdleState.await(30, TimeUnit.SECONDS);
assertTrue(success);
// clear out existing queued mock responses to make room for new ones
dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_CHANGES);
// 2. Update change event handler for handling ACTIVE and IDLE
final CountDownLatch activeSignal = new CountDownLatch(1);
final CountDownLatch idleSignal = new CountDownLatch(1);
pullReplication.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
Log.e(TAG, "[changed] PULL -> " + event);
if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
// make sure pull replicator becomes IDLE after ACTIVE state.
// so ignore any IDLE state before ACTIVE.
if (activeSignal.getCount() == 0) {
idleSignal.countDown();
}
} else if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_ACTIVE) {
activeSignal.countDown();
}
}
});
// 3. Create document into local db
Document doc = database.createDocument();
Map<String, Object> props = new HashMap<String, Object>();
props.put("key", "1");
doc.putProperties(props);
// 4. Based on local doc information, prepare mock change response for 1st /_changes request
String docId = doc.getId();
String revId = doc.getCurrentRevisionId();
int lastSeq = (int) database.getLastSequenceNumber();
MockDocumentGet.MockDocument mockDocument1 = new MockDocumentGet.MockDocument(docId, revId, lastSeq + 1);
mockDocument1.setJsonMap(MockHelper.generateRandomJsonMap());
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// 5. Prepare next mock change response for 2nd /_changes request (blocking for while)
MockChangesFeedNoResponse mockChangesFeedNoResponse2 = new MockChangesFeedNoResponse();
mockChangesFeedNoResponse2.setDelayMs(60 * 1000);
mockChangesFeedNoResponse2.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse2);
// 6. wait for Replication IDLE -> ACTIVE -> IDLE
success = activeSignal.await(30, TimeUnit.SECONDS);
assertTrue(success);
success = idleSignal.await(30, TimeUnit.SECONDS);
assertTrue(success);
// stop pull replication
stopReplication(pullReplication);
} finally {
server.shutdown();
}
Log.d(TAG, "TEST DONE");
}
public void testPullReplicatonWithManyAttachmentRevisions() throws Exception {
Log.d(TAG, "TEST START: testPullReplicatonWithManyAttachmentRevisions()");
String docID = "11111";
String key = "key";
String value = "one-one-one-one";
String attachmentName = "attachment.png";
// create initial document (Revision 1-xxxx)
Map<String, Object> props1 = new HashMap<String, Object>();
props1.put("_id", docID);
props1.put(key, value);
RevisionInternal rev = new RevisionInternal(props1);
Status status = new Status();
RevisionInternal savedRev = database.putRevision(rev, null, false, status);
String rev1ID = savedRev.getRevID();
// add attachment to doc (Revision 2-xxxx)
Document doc = database.getDocument(docID);
UnsavedRevision newRev = doc.createRevision();
InputStream attachmentStream = getAsset(attachmentName);
newRev.setAttachment(attachmentName, "image/png", attachmentStream);
SavedRevision saved = newRev.save(true);
String rev2ID = doc.getCurrentRevisionId();
Log.w(TAG, "saved => " + saved);
Log.w(TAG, "revID => " + doc.getCurrentRevisionId());
// Create 5 revisions with 50 conflicts each
int j = 3;
for (; j < 5; j++) {
// Create a conflict, won by the new revision:
Map<String, Object> props = new HashMap<String, Object>();
props.put("_id", docID);
props.put("_rev", j + "-0000");
props.put(key, value);
RevisionInternal leaf = new RevisionInternal(props);
database.forceInsert(leaf, new ArrayList<String>(), null);
Log.w(TAG, "revID => " + doc.getCurrentRevisionId());
for (int i = 0; i < 49; i++) {
// Create a conflict, won by the new revision:
Map<String, Object> props_conflict = new HashMap<String, Object>();
props_conflict.put("_id", docID);
String revStr = String.format("%d-%04d", j, i);
props_conflict.put("_rev", revStr);
props_conflict.put(key, value);
// attachment
byte[] attach1 = "This is the body of attach1".getBytes();
String base64 = Base64.encodeBytes(attach1);
Map<String, Object> attachment = new HashMap<String, Object>();
attachment.put("content_type", "text/plain");
attachment.put("data", base64);
Map<String, Object> attachmentDict = new HashMap<String, Object>();
attachmentDict.put("test_attachment", attachment);
props_conflict.put("_attachments", attachmentDict);
// end of attachment
RevisionInternal leaf_conflict = new RevisionInternal(props_conflict);
List<String> revHistory = new ArrayList<String>();
revHistory.add(leaf_conflict.getRevID());
for (int k = j - 1; k > 2; k
revHistory.add(String.format("%d-0000", k));
}
revHistory.add(rev2ID);
revHistory.add(rev1ID);
database.forceInsert(leaf_conflict, revHistory, null);
Log.w(TAG, "revID => " + doc.getCurrentRevisionId());
}
}
String docId = doc.getId();
String revId = j + "-00";
int lastSeq = (int) database.getLastSequenceNumber();
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
try {
// checkpoint PUT or GET response (sticky) (for both push and pull)
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
MockChangesFeed mockChangesFeedEmpty = new MockChangesFeed();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedEmpty.generateMockResponse());
// start mock server
server.play();
// create pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
final CountDownLatch idleSignal1 = new CountDownLatch(1);
final CountDownLatch idleSignal2 = new CountDownLatch(2);
pullReplication.addChangeListener(new Replication.ChangeListener() {
@Override
public void changed(Replication.ChangeEvent event) {
Log.e(TAG, event.toString());
if (event.getError() != null) {
Assert.fail("Should not have any error....");
}
if (event.getSource().getStatus() == Replication.ReplicationStatus.REPLICATION_IDLE) {
idleSignal1.countDown();
idleSignal2.countDown();
}
}
});
// start pull replication
pullReplication.start();
boolean success = idleSignal1.await(30, TimeUnit.SECONDS);
assertTrue(success);
MockDocumentGet.MockDocument mockDocument1 = new MockDocumentGet.MockDocument(docId, revId, lastSeq + 1);
mockDocument1.setJsonMap(MockHelper.generateRandomJsonMap());
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// doc response
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDocument1);
dispatcher.enqueueResponse(mockDocument1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// check /db/docid?...
RecordedRequest request = dispatcher.takeRequestBlocking(mockDocument1.getDocPathRegex(), 30 * 1000);
Log.e(TAG, request.toString());
Map<String, String> queries = query2map(request.getPath());
String atts_since = URLDecoder.decode(queries.get("atts_since"), "UTF-8");
List<String> json = (List<String>) str2json(atts_since);
Log.e(TAG, json.toString());
assertNotNull(json);
// atts_since parameter should be limit to PullerInternal.MAX_NUMBER_OF_ATTS_SINCE
assertTrue(json.size() == PullerInternal.MAX_NUMBER_OF_ATTS_SINCE);
boolean success2 = idleSignal2.await(30, TimeUnit.SECONDS);
assertTrue(success2);
// stop pull replication
stopReplication(pullReplication);
} finally {
server.shutdown();
}
Log.d(TAG, "TEST END: testPullReplicatonWithManyAttachmentRevisions()");
}
/**
 * Parses a JSON string into a generic Java object graph (Map/List/primitive).
 * Parse failures are logged and reported as a {@code null} return value.
 *
 * @param value JSON text to parse
 * @return the parsed object, or null if parsing failed
 */
public static Object str2json(String value) {
    try {
        return Manager.getObjectMapper().readValue(value, Object.class);
    } catch (Exception e) {
        Log.w("Unable to parse JSON Query", e);
        return null;
    }
}
/**
 * Parses URL query parameters into a map. Accepts either a bare query string
 * ("a=1&amp;b=2") or a full request path ("/db/doc?a=1&amp;b=2"): anything up to and
 * including the first '?' is stripped, so values taken straight from
 * {@code RecordedRequest.getPath()} parse correctly. (Previously the resource
 * path was fused into the first parameter's key, making it unfindable.)
 *
 * @param queryString a query string or a path-with-query; must not be null
 * @return map of parameter name to raw (still URL-encoded) value; components
 *         without '=' or with an empty name are skipped
 */
public static Map<String, String> query2map(String queryString) {
    Map<String, String> queries = new HashMap<String, String>();
    // Strip the resource path if the caller passed a full request path.
    int question = queryString.indexOf('?');
    String query = question >= 0 ? queryString.substring(question + 1) : queryString;
    for (String component : query.split("&")) {
        int location = component.indexOf('=');
        if (location > 0) {
            String key = component.substring(0, location);
            String value = component.substring(location + 1);
            queries.put(key, value);
        }
    }
    return queries;
}
/**
 * Test-only MultipartReaderDelegate that captures the body of a JSON MIME part,
 * transparently gunzipping it when the part headers declare gzip encoding.
 * NOTE(review): despite its name, {@link #appendToPart(byte[])} overwrites
 * rather than appends; the tests only deliver one chunk per part, so that is
 * sufficient here.
 */
class CustomMultipartReaderDelegate implements MultipartReaderDelegate {
    public Map<String, String> headers = null;
    public byte[] data = null;
    public boolean gzipped = false; // part declared "Content-Encoding: gzip"
    public boolean bJson = false;   // part declared "Content-Type: application/json"

    @Override
    public void startedPart(Map<String, String> headers) {
        gzipped = headers.get("Content-Encoding") != null && headers.get("Content-Encoding").contains("gzip");
        bJson = headers.get("Content-Type") != null && headers.get("Content-Type").contains("application/json");
    }

    @Override
    public void appendToPart(byte[] data) {
        if (gzipped && bJson) {
            this.data = Utils.decompressByGzip(data);
        } else if (bJson) {
            this.data = data;
        }
        // non-JSON parts are ignored on purpose
    }

    @Override
    public void appendToPart(final byte[] data, int off, int len) {
        // Bug fix: Arrays.copyOfRange takes (from, to), so the slice of `len`
        // bytes starting at `off` is [off, off + len). The previous upper bound
        // of `len - off` was only correct when off == 0.
        byte[] b = Arrays.copyOfRange(data, off, off + len);
        appendToPart(b);
    }

    @Override
    public void finishedPart() {
    }
}
/**
 * Verifies that a continuous push replication transitions IDLE -> ACTIVE -> IDLE
 * when a document is created while it is idle, and that the document actually
 * reaches the mock server via a _bulk_docs request.
 */
public void testPushReplActiveState() throws Exception {
    Log.d(TAG, "TEST START: testPushReplActiveState()");
    // make sure we are starting empty
    assertEquals(0, database.getLastSequenceNumber());
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        server.play();
        // checkpoint GET response w/ 404. also receives checkpoint PUT's
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // NOTE(review): variable is named pullReplication but this is a *push*
        // replication, as the test name implies.
        Replication pullReplication = database.createPushReplication(server.getUrl("/db"));
        pullReplication.setContinuous(true);
        final String checkpointId = pullReplication.remoteCheckpointDocID(); // save the checkpoint id for later usage
        // Event handler for IDLE
        CountDownLatch idleSignal = new CountDownLatch(1);
        ReplicationIdleObserver idleObserver = new ReplicationIdleObserver(idleSignal);
        pullReplication.addChangeListener(idleObserver);
        // start the continuous replication
        pullReplication.start();
        // wait until we get an IDLE event
        boolean successful = idleSignal.await(30, TimeUnit.SECONDS);
        assertTrue(successful);
        pullReplication.removeChangeListener(idleObserver);
        // Event handler for ACTIVE
        CountDownLatch activeSignal = new CountDownLatch(1);
        ReplicationActiveObserver activeObserver = new ReplicationActiveObserver(activeSignal);
        pullReplication.addChangeListener(activeObserver);
        // Event handler for IDLE2
        CountDownLatch idleSignal2 = new CountDownLatch(1);
        ReplicationIdleObserver idleObserver2 = new ReplicationIdleObserver(idleSignal2);
        pullReplication.addChangeListener(idleObserver2);
        // add docs -- this should wake the idle replicator (IDLE -> ACTIVE)
        Map<String, Object> properties1 = new HashMap<String, Object>();
        properties1.put("doc1", "testPushReplActiveState");
        final Document doc1 = createDocWithProperties(properties1);
        // wait until we get an ACTIVE event
        successful = activeSignal.await(30, TimeUnit.SECONDS);
        assertTrue(successful);
        pullReplication.removeChangeListener(activeObserver);
        // check _bulk_docs -- the new doc's content must have been pushed
        RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
        assertNotNull(request);
        assertTrue(MockHelper.getUtf8Body(request).contains("testPushReplActiveState"));
        // wait until we get an IDLE event
        successful = idleSignal2.await(30, TimeUnit.SECONDS);
        assertTrue(successful);
        pullReplication.removeChangeListener(idleObserver2);
        // stop pull replication
        stopReplication(pullReplication);
    } finally {
        server.shutdown();
    }
    Log.d(TAG, "TEST END: testPushReplActiveState()");
}
/**
 * Verifies that stopping continuous push and pull replicators actually
 * terminates their worker ("CBLRequestWorker") threads, allowing a grace
 * period (5s, then another 10s) for thread cleanup before failing.
 */
public void testStop() throws Exception {
    Log.d(Log.TAG, "START testStop()");
    boolean success = false;
    // create mock server
    MockDispatcher dispatcher = new MockDispatcher();
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    MockWebServer server = new MockWebServer();
    server.setDispatcher(dispatcher);
    try {
        server.play();
        // checkpoint PUT or GET response (sticky) (for both push and pull)
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // create pull replication & start it
        Replication pull = database.createPullReplication(server.getUrl("/db"));
        pull.setContinuous(true);
        final CountDownLatch pullIdleState = new CountDownLatch(1);
        ReplicationIdleObserver pullIdleObserver = new ReplicationIdleObserver(pullIdleState);
        pull.addChangeListener(pullIdleObserver);
        pull.start();
        // create push replication & start it
        // (bug fix: this previously created a second *pull* replication)
        Replication push = database.createPushReplication(server.getUrl("/db"));
        push.setContinuous(true);
        final CountDownLatch pushIdleState = new CountDownLatch(1);
        ReplicationIdleObserver pushIdleObserver = new ReplicationIdleObserver(pushIdleState);
        push.addChangeListener(pushIdleObserver);
        push.start();
        // wait till both push and pull replicators become idle.
        success = pullIdleState.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        pull.removeChangeListener(pullIdleObserver);
        success = pushIdleState.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        push.removeChangeListener(pushIdleObserver);
        // stop both pull and push replicators
        stopReplication(pull);
        stopReplication(push);
        boolean observedCBLRequestWorker = false;
        // First give 5 sec to clean thread status.
        try {
            Thread.sleep(5 * 1000);
        } catch (Exception e) {
            // best-effort sleep; interruption just shortens the grace period
        }
        // all threads which are associated with replicators should be terminated.
        Set<Thread> threadSet = Thread.getAllStackTraces().keySet();
        for (Thread t : threadSet) {
            // Bug fix: only flag when a live CBLRequestWorker is actually seen.
            // Previously the flag was set for *any* alive thread (the test
            // thread itself always is), so the retry path always triggered.
            if (t.isAlive() && t.getName().indexOf("CBLRequestWorker") != -1) {
                observedCBLRequestWorker = true;
                break;
            }
        }
        // second attempt: if a CBLRequestWorker thread is still observed,
        // wait longer and then fail if any remain.
        if (observedCBLRequestWorker) {
            // give 10 sec to clean thread status.
            try {
                Thread.sleep(10 * 1000);
            } catch (Exception e) {
                // best-effort sleep; interruption just shortens the grace period
            }
            // all threads which are associated with replicators should be terminated.
            Set<Thread> threadSet2 = Thread.getAllStackTraces().keySet();
            for (Thread t : threadSet2) {
                if (t.isAlive()) {
                    assertEquals(-1, t.getName().indexOf("CBLRequestWorker"));
                }
            }
        }
    } finally {
        // shutdown mock server
        server.shutdown();
    }
    Log.d(Log.TAG, "END testStop()");
}
/**
 * Verifies that filter params set via setFilterParams() are delivered to the
 * ReplicationFilter on every invocation during a push replication: the filter
 * counts down {@code latch} once per call and {@code check} only when the
 * expected param is missing, so {@code check} must remain at its initial count.
 */
public void testSetFilterParams() throws CouchbaseLiteException, IOException, InterruptedException {
    // make sure we are starting empty
    assertEquals(0, database.getLastSequenceNumber());
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    try {
        server.play();
        // checkpoint GET response w/ 404. also receives checkpoint PUT's
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // create 10 documents and delete 5
        for (int i = 0; i < 10; i++) {
            Document doc = null;
            if (i % 2 == 0) {
                doc = createDocument(i, true);
            } else {
                doc = createDocument(i, false);
            }
            // every even-numbered doc is deleted again, so half the revisions
            // pushed are deletions (filtered out by "unDeleted" below)
            if (i % 2 == 0) {
                try {
                    doc.delete();
                } catch (CouchbaseLiteException e) {
                    e.printStackTrace();
                }
            }
        }
        final CountDownLatch latch = new CountDownLatch(10);
        final CountDownLatch check = new CountDownLatch(10);
        database.setFilter("unDeleted", new ReplicationFilter() {
            @Override
            public boolean filter(SavedRevision savedRevision, Map<String, Object> params) {
                // count down `check` only when the expected param is absent --
                // the assertion below requires it never fires
                if (params == null || !"hello".equals(params.get("name"))) {
                    check.countDown();
                }
                latch.countDown();
                return !savedRevision.isDeletion();
            }
        });
        Replication pushReplication = database.createPushReplication(server.getUrl("/db"));
        pushReplication.setContinuous(false);
        pushReplication.setFilter("unDeleted");
        pushReplication.setFilterParams(Collections.<String, Object>singletonMap("name", "hello"));
        pushReplication.start();
        boolean success = latch.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        // `check` must still be at 10: params were present on every filter call
        assertEquals(10, check.getCount());
    } finally {
        server.shutdown();
    }
}
/**
 * Creates and saves a test document whose id is {@code number}.
 *
 * @param number document id (stringified)
 * @param flag   when true, the document also gets a "name" property
 * @return the document (returned even when the save failed; failures are only
 *         logged, preserving the original best-effort behavior)
 */
private Document createDocument(int number, boolean flag) {
    SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    // The pattern ends with a literal 'Z' (UTC designator); format in UTC so
    // the suffix is truthful instead of mislabeling local time as UTC.
    dateFormatter.setTimeZone(java.util.TimeZone.getTimeZone("UTC"));
    Calendar calendar = GregorianCalendar.getInstance();
    String currentTimeString = dateFormatter.format(calendar.getTime());
    Map<String, Object> properties = new HashMap<String, Object>();
    properties.put("type", "test_doc");
    properties.put("created_at", currentTimeString);
    if (flag) { // idiom fix: was "flag == true"
        properties.put("name", "Waldo");
    }
    Document document = database.getDocument(String.valueOf(number));
    try {
        document.putProperties(properties);
    } catch (CouchbaseLiteException e) {
        // best-effort save in test helper; log and fall through
        e.printStackTrace();
    }
    return document;
}
/**
 * Verifies that a continuous pull replication can be stopped and then
 * restarted via restart(), reaching the IDLE state a second time.
 */
public void testRestartWithStoppedReplicator() throws Exception {
    MockDispatcher dispatcher = new MockDispatcher();
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, 0, 0);
    try {
        server.play();
        // run pull replication
        Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
        pullReplication.setContinuous(true);
        // it should go idle twice, hence countdown latch = 2
        final CountDownLatch replicationIdleFirstTime = new CountDownLatch(1);
        final CountDownLatch replicationIdleSecondTime = new CountDownLatch(2);
        final CountDownLatch replicationStoppedFirstTime = new CountDownLatch(1);
        pullReplication.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                // track state-machine transitions into IDLE and STOPPED
                if (event.getTransition() != null && event.getTransition().getDestination() == ReplicationState.IDLE) {
                    Log.e(Log.TAG, "IDLE");
                    replicationIdleFirstTime.countDown();
                    replicationIdleSecondTime.countDown();
                } else if (event.getTransition() != null && event.getTransition().getDestination() == ReplicationState.STOPPED) {
                    Log.e(Log.TAG, "STOPPED");
                    replicationStoppedFirstTime.countDown();
                }
            }
        });
        pullReplication.start();
        // wait until replication goes idle
        boolean success = replicationIdleFirstTime.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        pullReplication.stop();
        // wait until replication stop
        success = replicationStoppedFirstTime.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        pullReplication.restart();
        // wait until replication goes idle again
        success = replicationIdleSecondTime.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        stopReplication(pullReplication);
    } finally {
        // cleanup / shutdown
        server.shutdown();
    }
}
/**
 * Exercises Replication.getPendingDocumentIDs()/isDocumentPending():
 * pending IDs reflect unpushed docs before a push, empty out after the push,
 * pick up docs created afterwards, and are always null for pull replications.
 */
public void test18_PendingDocumentIDs() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    server.setDispatcher(dispatcher);
    try {
        server.play();
        // checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        mockCheckpointPut.setDelayMs(50);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        Replication repl = database.createPushReplication(server.getUrl("/db"));
        // nothing created yet -> no pending docs
        assertNotNull(repl.getPendingDocumentIDs());
        assertEquals(0, repl.getPendingDocumentIDs().size());
        // create doc-1 .. doc-10 inside a single transaction
        assertTrue(database.runInTransaction(
            new TransactionalTask() {
                @Override
                public boolean run() {
                    for (int i = 1; i <= 10; i++) {
                        Document doc = database.getDocument(String.format("doc-%d", i));
                        Map<String, Object> props = new HashMap<String, Object>();
                        props.put("index", i);
                        props.put("bar", false);
                        try {
                            doc.putProperties(props);
                        } catch (CouchbaseLiteException e) {
                            fail(e.getMessage());
                        }
                    }
                    return true;
                }
            }
        ));
        // all 10 new docs are pending until the push runs
        assertEquals(10, repl.getPendingDocumentIDs().size());
        assertTrue(repl.isDocumentPending(database.getDocument("doc-1")));
        runReplication(repl);
        // after the push completes nothing is pending any more
        assertNotNull(repl.getPendingDocumentIDs());
        assertEquals(0, repl.getPendingDocumentIDs().size());
        assertFalse(repl.isDocumentPending(database.getDocument("doc-1")));
        // create doc-11 .. doc-20 after the push
        assertTrue(database.runInTransaction(
            new TransactionalTask() {
                @Override
                public boolean run() {
                    for (int i = 11; i <= 20; i++) {
                        Document doc = database.getDocument(String.format("doc-%d", i));
                        Map<String, Object> props = new HashMap<String, Object>();
                        props.put("index", i);
                        props.put("bar", false);
                        try {
                            doc.putProperties(props);
                        } catch (CouchbaseLiteException e) {
                            fail(e.getMessage());
                        }
                    }
                    return true;
                }
            }
        ));
        // a fresh push replication (same checkpoint) sees only the new docs
        repl = database.createPushReplication(server.getUrl("/db"));
        assertNotNull(repl.getPendingDocumentIDs());
        assertEquals(10, repl.getPendingDocumentIDs().size());
        assertTrue(repl.isDocumentPending(database.getDocument("doc-11")));
        assertFalse(repl.isDocumentPending(database.getDocument("doc-1")));
        // pull replication -- pending doc IDs are not defined for pulls
        repl = database.createPullReplication(server.getUrl("/db"));
        assertNull(repl.getPendingDocumentIDs());
        runReplication(repl);
        assertNull(repl.getPendingDocumentIDs());
    } finally {
        // cleanup / shutdown
        server.shutdown();
    }
}
/**
 * Pushes more documents than one _bulk_docs batch (batch size 100) and checks
 * that the client does not checkpoint excessively: after the final checkpoint
 * PUT (carrying the last sequence), no further checkpoint requests remain
 * queued on the mock server.
 */
public void testExcessiveCheckpointingDuringPushReplication() throws Exception {
    final int NUM_DOCS = 199;
    List<Document> docs = new ArrayList<Document>();
    // 1. Add more than 100 docs, as chunk size is 100
    for (int i = 0; i < NUM_DOCS; i++) {
        Map<String, Object> properties = new HashMap<String, Object>();
        properties.put("testExcessiveCheckpointingDuringPushReplication", String.valueOf(i));
        Document doc = createDocumentWithProperties(database, properties);
        docs.add(doc);
    }
    // create mock server
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = new MockWebServer();
    server.setDispatcher(dispatcher);
    try {
        server.play();
        // checkpoint GET response -> error
        // _revs_diff response -- everything missing
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        mockRevsDiff.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        mockBulkDocs.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // checkpoint PUT response (sticky)
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // 2. Kick off continuous push replication
        Replication replicator = database.createPushReplication(server.getUrl("/db"));
        replicator.setContinuous(true);
        CountDownLatch replicationIdleSignal = new CountDownLatch(1);
        ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
        replicator.addChangeListener(replicationIdleObserver);
        replicator.start();
        // 3. Wait for document to be pushed
        // NOTE: (Not 100% reproducible) With CBL Java on Jenkins (Super slow environment),
        // Replicator becomes IDLE between batches for this case, after 100 push replicated.
        // TODO: Need to investigate
        // wait until replication goes idle
        boolean successful = replicationIdleSignal.await(60, TimeUnit.SECONDS);
        assertTrue(successful);
        // wait until mock server gets the checkpoint PUT request
        boolean foundCheckpointPut = false;
        String expectedLastSequence = String.valueOf(NUM_DOCS);
        while (!foundCheckpointPut) {
            RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
            if (request.getMethod().equals("PUT")) {
                foundCheckpointPut = true;
                String body = request.getUtf8Body();
                Log.e("testExcessiveCheckpointingDuringPushReplication", "body => " + body);
                // TODO: this is not valid if device can not handle all replication data at once
                if (System.getProperty("java.vm.name").equalsIgnoreCase("Dalvik")) {
                    // the checkpoint body should carry the final lastSequence
                    assertTrue(body.indexOf(expectedLastSequence) != -1);
                }
                // wait until mock server responds to the checkpoint PUT request
                dispatcher.takeRecordedResponseBlocking(request);
            }
        }
        // make some assertions about the outgoing _bulk_docs requests
        RecordedRequest bulkDocsRequest1 = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
        assertNotNull(bulkDocsRequest1);
        if (System.getProperty("java.vm.name").equalsIgnoreCase("Dalvik")) {
            RecordedRequest bulkDocsRequest2 = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
            assertNotNull(bulkDocsRequest2);
            // TODO: this is not valid if device can not handle all replication data at once
            // order may not be guaranteed
            assertTrue(isBulkDocJsonContainsDoc(bulkDocsRequest1, docs.get(0)) || isBulkDocJsonContainsDoc(bulkDocsRequest2, docs.get(0)));
            assertTrue(isBulkDocJsonContainsDoc(bulkDocsRequest1, docs.get(100)) || isBulkDocJsonContainsDoc(bulkDocsRequest2, docs.get(100)));
        }
        // check if Android CBL client sent only one PUT /{db}/_local/xxxx request
        // previous check already consume this request, so queue size should be 0.
        BlockingQueue<RecordedRequest> queue = dispatcher.getRequestQueueSnapshot(MockHelper.PATH_REGEX_CHECKPOINT);
        assertEquals(0, queue.size());
        // cleanup
        stopReplication(replicator);
    } finally {
        server.shutdown();
    }
}
// NOTE: This test should be manually tested. This test uses delay, timeout, wait,...
// this could break test on Jenkins because it run on VM with ARM emulator.
// To run test, please remove "manual" from test method name.
/**
 * Verifies that a _bulk_get that keeps timing out causes the pull replication
 * to fail with SocketTimeoutException after exactly MAX_RETRIES retries
 * (initial attempt + MAX_RETRIES requests observed on the mock server).
 * Temporarily shrinks the HTTP timeouts and retry settings, restoring them
 * in the outer finally.
 */
public void manualTestBulkGetTimeout() throws Exception {
    // save current global settings so they can be restored afterwards
    int def1 = CouchbaseLiteHttpClientFactory.DEFAULT_CONNECTION_TIMEOUT_SECONDS;
    int def2 = CouchbaseLiteHttpClientFactory.DEFAULT_SO_TIMEOUT_SECONDS;
    int def3 = ReplicationInternal.MAX_RETRIES;
    int def4 = ReplicationInternal.RETRY_DELAY_SECONDS;
    try {
        // TIMEOUT 1 SEC
        CouchbaseLiteHttpClientFactory.DEFAULT_CONNECTION_TIMEOUT_SECONDS = 1;
        CouchbaseLiteHttpClientFactory.DEFAULT_SO_TIMEOUT_SECONDS = 1;
        ReplicationInternal.MAX_RETRIES = 2;
        ReplicationInternal.RETRY_DELAY_SECONDS = 0;
        // serve 2 mock docs
        int numMockDocsToServe = 2;
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        try {
            // mock documents to be pulled
            List<MockDocumentGet.MockDocument> mockDocs = MockHelper.getMockDocuments(numMockDocsToServe);
            // respond to all GET (responds with 404) and PUT Checkpoint requests
            MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
            mockCheckpointPut.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
            // _changes response
            MockChangesFeed mockChangesFeed = new MockChangesFeed();
            for (MockDocumentGet.MockDocument mockDocument : mockDocs) {
                mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument));
            }
            SmartMockResponseImpl smartMockResponse = new SmartMockResponseImpl(mockChangesFeed.generateMockResponse());
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, smartMockResponse);
            // _bulk_get response
            MockDocumentBulkGet mockBulkGet = new MockDocumentBulkGet();
            for (MockDocumentGet.MockDocument mockDocument : mockDocs) {
                mockBulkGet.addDocument(mockDocument);
            }
            // _bulk_get delays 4 sec, which is longer than the 1 sec custom
            // timeout set above, so every attempt times out.
            mockBulkGet.setDelayMs(4 * 1000);
            // makes sticky for retry response
            mockBulkGet.setSticky(true);
            dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_GET, mockBulkGet);
            // start mock server
            server.play();
            // run pull replication
            Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
            runReplication(pullReplication, 3 * 60);
            assertNotNull(pullReplication.getLastError());
            assertTrue(pullReplication.getLastError() instanceof java.net.SocketTimeoutException);
            // dump out the outgoing requests for bulk docs
            BlockingQueue<RecordedRequest> bulkGetRequests = dispatcher.getRequestQueueSnapshot(MockHelper.PATH_REGEX_BULK_GET);
            // +1 for initial request
            assertEquals(ReplicationInternal.MAX_RETRIES + 1, bulkGetRequests.size());
        } finally {
            server.shutdown();
        }
    } finally {
        // restore global timeout/retry settings
        CouchbaseLiteHttpClientFactory.DEFAULT_CONNECTION_TIMEOUT_SECONDS = def1;
        CouchbaseLiteHttpClientFactory.DEFAULT_SO_TIMEOUT_SECONDS = def2;
        ReplicationInternal.MAX_RETRIES = def3;
        ReplicationInternal.RETRY_DELAY_SECONDS = def4;
    }
}
// ReplicatorInternal.m: test_UseRemoteUUID
/**
 * Checkpoint doc IDs must be derived from the remoteUUID (when one is set)
 * rather than the remote URL, while still reflecting filter settings.
 */
public void testUseRemoteUUID() throws Exception {
    // Two replications against different URLs but the same remoteUUID must
    // share a checkpoint doc ID.
    URL remoteURL1 = new URL("http://alice.local:55555/db");
    Replication replA = database.createPullReplication(remoteURL1);
    replA.setRemoteUUID("cafebabe");
    String checkpointA = replA.replicationInternal.remoteCheckpointDocID();

    URL remoteURL2 = new URL("http://alice17.local:44444/db");
    Replication replB = database.createPullReplication(remoteURL2);
    replB.setRemoteUUID("cafebabe");
    String checkpointB = replB.replicationInternal.remoteCheckpointDocID();
    assertEquals(checkpointA, checkpointB);

    // Same remoteUUID but different filter settings must yield a different ID.
    Replication replC = database.createPullReplication(remoteURL2);
    replC.setRemoteUUID("cafebabe");
    replC.setFilter("Melitta");
    String checkpointC = replC.replicationInternal.remoteCheckpointDocID();
    assertNotSame(checkpointB, checkpointC);
}
/**
 * This test is almost identical with
 * TestCase(CBL_Pusher_DocIDs) in CBLReplicator_Tests.m
 *
 * Verifies that setDocIds() on a push replication restricts both the pending
 * document IDs and the documents actually sent in _bulk_docs to the given set.
 */
public void testPushReplicationSetDocumentIDs() throws Exception {
    // Create documents:
    createDocumentForPushReplication("doc1", null, null);
    createDocumentForPushReplication("doc2", null, null);
    createDocumentForPushReplication("doc3", null, null);
    createDocumentForPushReplication("doc4", null, null);
    MockWebServer server = null;
    try {
        // Create mock server and play:
        MockDispatcher dispatcher = new MockDispatcher();
        server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        server.play();
        // Checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests:
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        mockCheckpointPut.setDelayMs(50);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing:
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // Create push replication restricted to doc2/doc3:
        Replication replication = database.createPushReplication(server.getUrl("/db"));
        replication.setDocIds(Arrays.asList(new String[] {"doc2", "doc3"}));
        // check pending document IDs -- only the restricted set is pending:
        Set<String> pendingDocIDs = replication.getPendingDocumentIDs();
        assertEquals(2, pendingDocIDs.size());
        assertFalse(pendingDocIDs.contains("doc1"));
        assertTrue(pendingDocIDs.contains("doc2"));
        assertTrue(pendingDocIDs.contains("doc3"));
        assertFalse(pendingDocIDs.contains("doc4"));
        // Run replication:
        runReplication(replication);
        // Check result -- only doc2/doc3 appear in the outgoing _bulk_docs:
        RecordedRequest bulkDocsRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
        assertNotNull(bulkDocsRequest);
        assertFalse(MockHelper.getUtf8Body(bulkDocsRequest).contains("doc1"));
        assertTrue(MockHelper.getUtf8Body(bulkDocsRequest).contains("doc2"));
        assertTrue(MockHelper.getUtf8Body(bulkDocsRequest).contains("doc3"));
        assertFalse(MockHelper.getUtf8Body(bulkDocsRequest).contains("doc4"));
    } finally {
        if (server != null)
            server.shutdown();
    }
}
/**
 * Verifies that setDocIds() on a pull replication makes the client POST to
 * _changes with the built-in "_doc_ids" filter and the requested doc_ids list.
 */
public void testPullReplicationSetDocumentIDs() throws Exception {
    MockWebServer server = null;
    try {
        // Create mock server and play:
        MockDispatcher dispatcher = new MockDispatcher();
        server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        server.play();
        // checkpoint PUT or GET response (sticky):
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _changes response:
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // Run pull replication restricted to doc2/doc3:
        Replication replication = database.createPullReplication(server.getUrl("/db"));
        replication.setDocIds(Arrays.asList(new String[] {"doc2", "doc3"}));
        runReplication(replication);
        // Check changes feed request -- must be a POST carrying the _doc_ids
        // filter plus the requested IDs in its JSON body:
        RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
        assertTrue(getChangesFeedRequest.getMethod().equals("POST"));
        String body = getChangesFeedRequest.getUtf8Body();
        Map<String, Object> jsonMap = Manager.getObjectMapper().readValue(body, Map.class);
        assertTrue(jsonMap.containsKey("filter"));
        String filter = (String) jsonMap.get("filter");
        assertEquals("_doc_ids", filter);
        List<String> docIDs = (List<String>) jsonMap.get("doc_ids");
        assertNotNull(docIDs);
        assertEquals(2, docIDs.size());
        assertTrue(docIDs.contains("doc2"));
        assertTrue(docIDs.contains("doc3"));
    } finally {
        if (server != null)
            server.shutdown();
    }
}
/**
 * Verifies that the client advertises gzip support on the _changes request
 * (Accept-Encoding: gzip) and correctly pulls two documents from a gzipped
 * changes feed served by the mock server.
 */
public void testPullWithGzippedChangesFeed() throws Exception {
    MockWebServer server = null;
    try {
        // Create mock server and play:
        MockDispatcher dispatcher = new MockDispatcher();
        server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        server.play();
        // Mock documents to be pulled:
        MockDocumentGet.MockDocument mockDoc1 =
                new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
        mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
        MockDocumentGet.MockDocument mockDoc2 =
                new MockDocumentGet.MockDocument("doc2", "1-563b", 2);
        mockDoc2.setJsonMap(MockHelper.generateRandomJsonMap());
        // // checkpoint GET response w/ 404:
        MockResponse fakeCheckpointResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeCheckpointResponse);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        // _changes response (served gzip-compressed):
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc2));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES,
                mockChangesFeed.generateMockResponse(/*gzip*/true));
        // doc1 response:
        MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
        dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(),
                mockDocumentGet.generateMockResponse());
        // doc2 response:
        mockDocumentGet = new MockDocumentGet(mockDoc2);
        dispatcher.enqueueResponse(mockDoc2.getDocPathRegex(),
                mockDocumentGet.generateMockResponse());
        // _bulk_get response:
        MockDocumentBulkGet mockBulkGet = new MockDocumentBulkGet();
        mockBulkGet.addDocument(mockDoc1);
        mockBulkGet.addDocument(mockDoc2);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_GET, mockBulkGet);
        // Respond to all PUT Checkpoint requests
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        mockCheckpointPut.setDelayMs(500);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // Setup database change listener to collect pulled doc IDs:
        final List<String> changeDocIDs = new ArrayList<String>();
        database.addChangeListener(new Database.ChangeListener() {
            @Override
            public void changed(Database.ChangeEvent event) {
                for (DocumentChange change : event.getChanges()) {
                    changeDocIDs.add(change.getDocumentId());
                }
            }
        });
        // Run pull replication:
        Replication replication = database.createPullReplication(server.getUrl("/db"));
        runReplication(replication);
        // Check result -- both docs must have been pulled:
        assertEquals(2, changeDocIDs.size());
        String[] docIDs = changeDocIDs.toArray(new String[changeDocIDs.size()]);
        Arrays.sort(docIDs);
        assertTrue(Arrays.equals(new String[]{"doc1", "doc2"}, docIDs));
        // Check changes feed request -- client must have offered gzip:
        RecordedRequest changesFeedRequest =
                dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
        String acceptEncoding = changesFeedRequest.getHeader("Accept-Encoding");
        assertNotNull(acceptEncoding);
        assertTrue(acceptEncoding.contains("gzip"));
    } finally {
        if (server != null)
            server.shutdown();
    }
}
}
|
package org.ovirt.engine.core.vdsbroker;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.ArchitectureType;
import org.ovirt.engine.core.common.businessentities.DisplayType;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.GraphicsDevice;
import org.ovirt.engine.core.common.businessentities.GraphicsType;
import org.ovirt.engine.core.common.businessentities.IVdsEventListener;
import org.ovirt.engine.core.common.businessentities.OriginType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSGroup;
import org.ovirt.engine.core.common.businessentities.VM;
import org.ovirt.engine.core.common.businessentities.VmDevice;
import org.ovirt.engine.core.common.businessentities.VmDeviceGeneralType;
import org.ovirt.engine.core.common.businessentities.VmDeviceId;
import org.ovirt.engine.core.common.businessentities.VmDynamic;
import org.ovirt.engine.core.common.businessentities.VmGuestAgentInterface;
import org.ovirt.engine.core.common.businessentities.VmJob;
import org.ovirt.engine.core.common.businessentities.VmStatic;
import org.ovirt.engine.core.common.businessentities.VmStatistics;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkStatistics;
import org.ovirt.engine.core.common.businessentities.storage.DiskImage;
import org.ovirt.engine.core.common.businessentities.storage.DiskImageDynamic;
import org.ovirt.engine.core.common.businessentities.storage.LUNs;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.osinfo.OsRepository;
import org.ovirt.engine.core.common.utils.Pair;
import org.ovirt.engine.core.common.utils.SimpleDependecyInjector;
import org.ovirt.engine.core.common.utils.VmDeviceCommonUtils;
import org.ovirt.engine.core.common.utils.VmDeviceType;
import org.ovirt.engine.core.common.vdscommands.FullListVDSCommandParameters;
import org.ovirt.engine.core.common.vdscommands.VDSCommandType;
import org.ovirt.engine.core.common.vdscommands.VDSReturnValue;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.TransactionScopeOption;
import org.ovirt.engine.core.compat.Version;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.utils.transaction.TransactionMethod;
import org.ovirt.engine.core.utils.transaction.TransactionSupport;
import org.ovirt.engine.core.vdsbroker.vdsbroker.VdsBrokerObjectsBuilder;
import org.ovirt.engine.core.vdsbroker.vdsbroker.VdsProperties;
import org.ovirt.engine.core.vdsbroker.vdsbroker.entities.VmInternalData;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Invokes all VM analyzers for the VMs reported by a host, iterates over their
 * reports and takes actions accordingly - fires VDSM commands (destroy, run/rerun,
 * migrate), reports completed actions, hands over migrations and persists the
 * collected changes to the database.
 */
public class VmsMonitoring {

    /*
     * When importing external VMs or the Hosted Engine VM there are devices which do not have an address field but we
     * want them to be imported.
     */
    private static final List<String> graphicsDevices = Arrays.asList(
            GraphicsType.VNC.name().toLowerCase(),
            GraphicsType.SPICE.name().toLowerCase()
    );

    private final boolean timeToUpdateVmStatistics;
    private final long fetchTime;
    private VdsManager vdsManager;
    private DbFacade dbFacade;

    /**
     * The Vms we want to monitor and analyze for changes.
     * VM object represent the persisted object(namely the one in db) and the VmInternalData
     * is the running one as reported from VDSM
     */
    private List<Pair<VM, VmInternalData>> monitoredVms;

    /**
     * A collection of VMs that has changes in devices.
     */
    private List<Pair<VM, VmInternalData>> vmsWithChangedDevices;

    private final AuditLogDirector auditLogDirector;

    /**
     * The managers of the monitored VMs in this cycle.
     */
    private Map<Guid, VmManager> vmManagers = new HashMap<>();

    /**
     * The analyzers which hold all the data per a VM
     */
    private List<VmAnalyzer> vmAnalyzers = new ArrayList<>();

    //*** data collectors ***//
    private final Map<Guid, VmDynamic> vmDynamicToSave = new HashMap<>();
    private final List<VmStatistics> vmStatisticsToSave = new ArrayList<>();
    private final List<List<VmNetworkInterface>> vmInterfaceStatisticsToSave = new ArrayList<>();
    private final Collection<Pair<Guid, DiskImageDynamic>> vmDiskImageDynamicToSave = new LinkedList<>();
    private final List<VmDevice> vmDeviceToSave = new ArrayList<>();
    private final Map<Guid, List<VmGuestAgentInterface>> vmGuestAgentNics = new HashMap<>();
    private final List<VmDynamic> poweringUpVms = new ArrayList<>();
    private final List<VmDevice> newVmDevices = new ArrayList<>();
    private final List<VmDeviceId> removedDeviceIds = new ArrayList<>();
    private final List<LUNs> vmLunDisksToSave = new ArrayList<>();
    private final List<Guid> autoVmsToRun = new ArrayList<>();
    private final List<VmStatic> externalVmsToAdd = new ArrayList<>();
    private final Map<Guid, VmJob> vmJobsToUpdate = new HashMap<>();
    private final List<Guid> vmJobIdsToRemove = new ArrayList<>();
    private final List<Guid> existingVmJobIds = new ArrayList<>();
    private List<Pair<VM, VmInternalData>> externalVms = new ArrayList<>();
    //*** data collectors ***//

    private static final String EXTERNAL_VM_NAME_FORMAT = "external-%1$s";
    private static final Logger log = LoggerFactory.getLogger(VmsMonitoring.class);

    /**
     * @param vdsManager the host manager related to this cycle.
     * @param monitoredVms the vms we want to monitor/analyze/react on. this structure is
     *                     a pair of the persisted (db currently) VM and the running VM which was reported from vdsm.
     *                     Analysis and reactions would be taken on those VMs only.
     * @param vmsWithChangedDevices VMs whose device hash changed since the last cycle.
     * @param auditLogDirector used by the analyzers to emit audit log events.
     * @param fetchTime the monotonic timestamp at which the VDSM data was fetched.
     */
    public VmsMonitoring(
            VdsManager vdsManager,
            List<Pair<VM, VmInternalData>> monitoredVms,
            List<Pair<VM, VmInternalData>> vmsWithChangedDevices,
            AuditLogDirector auditLogDirector,
            long fetchTime) {
        this(vdsManager, monitoredVms, vmsWithChangedDevices, auditLogDirector, fetchTime, false);
    }

    public VmsMonitoring(
            VdsManager vdsManager,
            List<Pair<VM, VmInternalData>> monitoredVms,
            List<Pair<VM, VmInternalData>> vmsWithChangedDevices,
            AuditLogDirector auditLogDirector,
            long fetchTime,
            boolean timeToUpdateVmStatistics) {
        this.vdsManager = vdsManager;
        this.monitoredVms = monitoredVms;
        this.vmsWithChangedDevices = vmsWithChangedDevices;
        this.auditLogDirector = auditLogDirector;
        this.fetchTime = fetchTime;
        this.timeToUpdateVmStatistics = timeToUpdateVmStatistics;
        dbFacade = DbFacade.getInstance();
    }

    /**
     * analyze and react upon changes on the monitoredVms. relevant changes would
     * be persisted and state transitions and internal commands would
     * take place accordingly.
     */
    public void perform() {
        try {
            refreshExistingVmJobList();
            refreshVmStats();
            afterVMsRefreshTreatment();
            vdsManager.vmsMonitoringInitFinished();
        } catch (RuntimeException ex) {
            log.error("Failed during vms monitoring on host {} error is: {}", vdsManager.getVdsName(), ex);
            log.error("Exception:", ex);
        } finally {
            // Release every VmManager lock taken during this cycle, no matter what failed.
            unlockVmsManager();
        }
    }

    protected boolean isTimeToUpdateVmStatistics() {
        return timeToUpdateVmStatistics;
    }

    /**
     * lock Vms which has db entity i.e they are managed by a VmManager
     * @param pair the persisted VM (may be null) paired with the VDSM-reported data
     * @return true if lock acquired
     */
    private boolean tryLockVmForUpdate(Pair<VM, VmInternalData> pair) {
        Guid vmId = getVmId(pair);

        if (vmId != null) {
            VmManager vmManager = getResourceManager().getVmManager(vmId);

            if (vmManager.trylock()) {
                if (!vmManager.isLatestData(pair.getSecond(), vdsManager.getVdsId())) {
                    log.warn("skipping VM '{}' from this monitoring cycle" +
                            " - newer VM data was already processed", vmId);
                    vmManager.unlock();
                } else if (vmManager.getVmDataChangedTime() != null && fetchTime - vmManager.getVmDataChangedTime() <= 0) {
                    log.warn("skipping VM '{}' from this monitoring cycle" +
                            " - the VM data has changed since fetching the data", vmId);
                    vmManager.unlock();
                } else {
                    // store the locked managers to finally release them at the end of the cycle
                    vmManagers.put(vmId, vmManager);
                    return true;
                }
            } else {
                log.debug("skipping VM '{}' from this monitoring cycle" +
                        " - the VM is locked by its VmManager ", getVmId(pair));
            }
        }
        return false;
    }

    /** Releases every VmManager locked by {@link #tryLockVmForUpdate}, stamping the data-changed time first. */
    private void unlockVmsManager() {
        for (VmManager vmManager : vmManagers.values()) {
            vmManager.updateVmDataChangedTime();
            vmManager.unlock();
        }
    }

    /**
     * Analyze the VM data pair
     * Skip analysis on VMs which cannot be locked
     * note: metrics calculation like memCommited and vmsCoresCount should be calculated *before*
     * this filtering.
     */
    private void refreshVmStats() {
        for (Pair<VM, VmInternalData> monitoredVm : monitoredVms) {
            // TODO filter out migratingTo VMs if no action is taken on them
            if (tryLockVmForUpdate(monitoredVm)) {
                VmAnalyzer vmAnalyzer = getVmAnalyzer(monitoredVm);
                vmAnalyzers.add(vmAnalyzer);
                vmAnalyzer.analyze();

                if (vmAnalyzer.isExternalVm()) {
                    externalVms.add(new Pair<>(vmAnalyzer.getDbVm(), vmAnalyzer.getVdsmVm()));
                }
            }
        }

        updateHEcluster();
        processExternallyManagedVms();
        processVmsWithDevicesChange();
        saveVmsToDb();
    }

    protected VmAnalyzer getVmAnalyzer(Pair<VM, VmInternalData> pair) {
        return new VmAnalyzer(
                pair.getFirst(),
                pair.getSecond(),
                this,
                auditLogDirector);
    }

    /** Acts on the analyzers' verdicts: rerun, auto-start, IP change, powering-up, stop, HE import. */
    private void afterVMsRefreshTreatment() {
        Collection<Guid> movedToDownVms = new ArrayList<>();
        List<Guid> succeededToRunVms = new ArrayList<>();

        // now loop over the result and act
        for (VmAnalyzer vmAnalyzer : vmAnalyzers) {

            // rerun all vms from rerun list
            if (vmAnalyzer.isRerun()) {
                log.error("Rerun VM '{}'. Called from VDS '{}'", vmAnalyzer.getDbVm().getId(), vdsManager.getVdsName());
                ResourceManager.getInstance().RerunFailedCommand(vmAnalyzer.getDbVm().getId(), vdsManager.getVdsId());
            }

            if (vmAnalyzer.isSuccededToRun()) {
                vdsManager.succeededToRunVm(vmAnalyzer.getDbVm().getId());
                succeededToRunVms.add(vmAnalyzer.getDbVm().getId());
            }

            // Refrain from auto-start HA VM during its re-run attempts.
            if (vmAnalyzer.isAutoVmToRun() && !vmAnalyzer.isRerun()) {
                autoVmsToRun.add(vmAnalyzer.getDbVm().getId());
            }

            // process all vms that their ip changed.
            if (vmAnalyzer.isClientIpChanged()) {
                final VmDynamic vmDynamic = vmAnalyzer.getVdsmVm().getVmDynamic();
                getVdsEventListener().processOnClientIpChange(vmDynamic.getId(),
                        vmDynamic.getClientIp());
            }

            // process all vms that powering up.
            if (vmAnalyzer.isPoweringUp()) {
                getVdsEventListener().processOnVmPoweringUp(vmAnalyzer.getVdsmVm().getVmDynamic().getId());
            }

            if (vmAnalyzer.isMovedToDown()) {
                movedToDownVms.add(vmAnalyzer.getDbVm().getId());
            }

            if (vmAnalyzer.isRemoveFromAsync()) {
                ResourceManager.getInstance().RemoveAsyncRunningVm(vmAnalyzer.getDbVm().getId());
            }

            if (vmAnalyzer.isHostedEngineUnmanaged()) {
                // @since 3.6 - we take existing HE VM and reimport it
                importHostedEngineVM(getVmInfo(Collections.singletonList(vmAnalyzer.getVdsmVm()
                        .getVmDynamic()
                        .getId()
                        .toString()))[0]);
            }
        }

        getVdsEventListener().updateSlaPolicies(succeededToRunVms, vdsManager.getVdsId());

        // run all vms that crashed that marked with auto startup
        getVdsEventListener().runFailedAutoStartVMs(autoVmsToRun);

        // process all vms that went down
        getVdsEventListener().processOnVmStop(movedToDownVms, vdsManager.getVdsId());

        getVdsEventListener().refreshHostIfAnyVmHasHostDevices(succeededToRunVms, vdsManager.getVdsId());
    }

    // Visible for testing
    protected void importHostedEngineVM(Map vmStruct) {
        VM vm = VdsBrokerObjectsBuilder.buildVmsDataFromExternalProvider(vmStruct);
        if (vm != null) {
            vm.setImages(VdsBrokerObjectsBuilder.buildDiskImagesFromDevices(vmStruct));
            vm.setInterfaces(VdsBrokerObjectsBuilder.buildVmNetworkInterfacesFromDevices(vmStruct));
            for (DiskImage diskImage : vm.getImages()) {
                vm.getDiskMap().put(Guid.newGuid(), diskImage);
            }
            vm.setVdsGroupId(getVdsManager().getVdsGroupId());
            vm.setRunOnVds(getVdsManager().getVdsId());

            // Search for spice or vnc devices and add the result as a managed device to the VM
            for (Object o : (Object[]) vmStruct.get(VdsProperties.Devices)) {
                Map device = (Map<String, Object>) o;
                String deviceName = (String) device.get(VdsProperties.Device);
                if (graphicsDevices.contains(deviceName)) {
                    GraphicsDevice graphicsDevice = new GraphicsDevice(VmDeviceType.valueOf(deviceName.toUpperCase()));
                    graphicsDevice.setVmId(vm.getId());
                    graphicsDevice.setDeviceId(Guid.newGuid());
                    vm.setSingleQxlPci(false);
                    if (graphicsDevice.getGraphicsType() == GraphicsType.VNC) {
                        vm.setDefaultDisplayType(DisplayType.cirrus);
                    } else {
                        vm.setDefaultDisplayType(DisplayType.qxl);
                    }
                    vm.getManagedVmDeviceMap().put(graphicsDevice.getDeviceId(), graphicsDevice);
                    // Only the first graphics device is imported as managed.
                    break;
                }
            }
            getVdsEventListener().importHostedEngineVm(vm);
        }
    }

    private void processVmsWithDevicesChange() {
        // Handle VM devices were changed (for 3.1 cluster and above)
        if (!VmDeviceCommonUtils.isOldClusterVersion(vdsManager.getGroupCompatibilityVersion())) {
            // If there are vms that require updating,
            // get the new info from VDSM in one call, and then update them all
            if (!vmsWithChangedDevices.isEmpty()) {
                ArrayList<String> vmsToUpdate = new ArrayList<>(vmsWithChangedDevices.size());
                for (Pair<VM, VmInternalData> pair : vmsWithChangedDevices) {
                    Guid vmId = pair.getFirst().getId();
                    // update only if the vm marked to change, otherwise it might have skipped because data invalidated
                    // this ensure the vmManager lock is taken
                    if (vmDynamicToSave.containsKey(vmId)) {
                        vmDynamicToSave.get(vmId).setHash(pair.getSecond().getVmDynamic().getHash());
                        vmsToUpdate.add(vmId.toString());
                    } else {
                        log.warn("VM '{}' not in changed list, skipping devices update.", vmId);
                    }
                }
                updateVmDevices(vmsToUpdate);
            }
        }
    }

    /** Flushes every collected change (dynamic, statistics, interfaces, disks, devices, jobs) to the DB. */
    private void saveVmsToDb() {
        getDbFacade().getVmDynamicDao().updateAllInBatch(vmDynamicToSave.values());

        getDbFacade().getVmStatisticsDao().updateAllInBatch(vmStatisticsToSave);

        final List<VmNetworkStatistics> allVmInterfaceStatistics = new LinkedList<>();
        for (List<VmNetworkInterface> list : vmInterfaceStatisticsToSave) {
            for (VmNetworkInterface iface : list) {
                allVmInterfaceStatistics.add(iface.getStatistics());
            }
        }
        getDbFacade().getVmNetworkStatisticsDao().updateAllInBatch(allVmInterfaceStatistics);

        getDbFacade().getDiskImageDynamicDao().updateAllDiskImageDynamicWithDiskIdByVmId(vmDiskImageDynamicToSave);

        getDbFacade().getLunDao().updateAllInBatch(vmLunDisksToSave);

        getVdsEventListener().addExternallyManagedVms(externalVmsToAdd);

        saveVmDevicesToDb();
        saveVmGuestAgentNetworkDevices();
        saveVmJobsToDb();
    }

    /** Falls back to the architecture's default OS id when VDSM did not report a guest OS name. */
    protected void setOsId(VmStatic vmStatic, String guestOsNameFromVdsm, int defaultArchOsId) {
        if (StringUtils.isEmpty(guestOsNameFromVdsm)) {
            log.debug("VM '{}': setting default OS ID: '{}'", vmStatic.getName(), defaultArchOsId);
            vmStatic.setOsId(defaultArchOsId);
        }
    }

    /** Falls back to the given default display type when VDSM did not report one. */
    protected void setDisplayType(VmStatic vmStatic, String displayTypeFromVdsm, DisplayType defaultDisplayType) {
        if (StringUtils.isEmpty(displayTypeFromVdsm)) {
            log.debug("VM '{}': setting default display type: '{}'", vmStatic.getName(), defaultDisplayType.getValue());
            vmStatic.setDefaultDisplayType(defaultDisplayType);
        }
    }

    private int getDefaultOsId(ArchitectureType architecture) {
        OsRepository osRepository = SimpleDependecyInjector.getInstance().get(OsRepository.class);
        Integer defaultArchOsId = osRepository.getDefaultOSes().get(architecture);
        return (defaultArchOsId == null) ? 0 : defaultArchOsId;
    }

    private DisplayType getDefaultDisplayType(int osId, Version clusterVersion) {
        OsRepository osRepository = SimpleDependecyInjector.getInstance().get(OsRepository.class);
        List<Pair<GraphicsType, DisplayType>> pairs = osRepository.getGraphicsAndDisplays(osId, clusterVersion);

        if (!pairs.isEmpty()) {
            Pair<GraphicsType, DisplayType> graphicsDisplayPair = pairs.get(0);
            return graphicsDisplayPair.getSecond();
        }

        return DisplayType.qxl;
    }

    /**
     * Keeps the Hosted Engine VM's cluster assignment in sync with the cluster
     * of the host it is currently running on.
     */
    protected void updateHEcluster() {
        List<VmStatic> byName = dbFacade.getVmStaticDao().getAllByName(Config.<String>getValue(ConfigValues.HostedEngineVmName));
        if (byName == null || byName.isEmpty()) {
            // No Hosted Engine VM exists in this setup - nothing to reconcile.
            return;
        }
        VmStatic heVmStatic = byName.get(0);
        String clusterIdFromVm = heVmStatic.getVdsGroupId().toString();
        VmDynamic heVmDynamic = dbFacade.getVmDynamicDao().get(heVmStatic.getId());
        Guid hostId = (heVmDynamic == null) ? null : heVmDynamic.getRunOnVds();
        if (null != hostId) {
            Guid clusterIdFromHost = dbFacade.getVdsDao().get(hostId).getVdsGroupId();
            if (!clusterIdFromVm.equals(clusterIdFromHost.toString())) {
                heVmStatic.setVdsGroupId(clusterIdFromHost);
                dbFacade.getVmStaticDao().update(heVmStatic);
            } else {
                log.info(" hostedEngine ");
            }
        }
    }

    protected void processExternallyManagedVms() {
        // Fetching for details from the host
        // and marking the VMs for addition
        List<String> vmsToQuery = new ArrayList<>(externalVms.size());
        for (Pair<VM, VmInternalData> pair : externalVms) {
            vmsToQuery.add(pair.getSecond().getVmDynamic().getId().toString());
        }
        if (!vmsToQuery.isEmpty()) {
            VDSGroup vdsGroup = getDbFacade().getVdsGroupDao().get(vdsManager.getVdsGroupId());
            int defaultOsId = getDefaultOsId(vdsGroup.getArchitecture());
            DisplayType defaultDisplayType = getDefaultDisplayType(defaultOsId, vdsGroup.getCompatibilityVersion());

            // Query VDSM for VMs info, and creating a proper VMStatic to be used when importing them
            Map[] vmsInfo = getVmInfo(vmsToQuery);
            for (Map vmInfo : vmsInfo) {
                convertVm(defaultOsId, defaultDisplayType, vmInfo);
            }
        }
    }

    // Visible for testing
    protected void convertVm(int defaultOsId, DisplayType defaultDisplayType, Map vmInfo) {
        Guid vmId = Guid.createGuidFromString((String) vmInfo.get(VdsProperties.vm_guid));
        VmStatic vmStatic = new VmStatic();
        vmStatic.setId(vmId);
        vmStatic.setCreationDate(new Date());
        vmStatic.setVdsGroupId(vdsManager.getVdsGroupId());
        String vmNameOnHost = (String) vmInfo.get(VdsProperties.vm_name);

        if (StringUtils.equals(Config.<String>getValue(ConfigValues.HostedEngineVmName), vmNameOnHost)) {
            // its a hosted engine VM -> import it and skip the external VM phase
            importHostedEngineVM(vmInfo);
            return;
        } else {
            vmStatic.setName(String.format(EXTERNAL_VM_NAME_FORMAT, vmNameOnHost));
            vmStatic.setOrigin(OriginType.EXTERNAL);
        }

        vmStatic.setNumOfSockets(VdsBrokerObjectsBuilder.parseIntVdsProperty(vmInfo.get(VdsProperties.num_of_cpus)));
        vmStatic.setMemSizeMb(VdsBrokerObjectsBuilder.parseIntVdsProperty(vmInfo.get(VdsProperties.mem_size_mb)));
        vmStatic.setSingleQxlPci(false);

        setOsId(vmStatic, (String) vmInfo.get(VdsProperties.guest_os), defaultOsId);
        setDisplayType(vmStatic, (String) vmInfo.get(VdsProperties.displayType), defaultDisplayType);

        log.info("Importing VM '{}' as '{}', as it is running on the on Host, but does not exist in the engine.", vmNameOnHost, vmStatic.getName());
        externalVmsToAdd.add(vmStatic);
    }

    /** Replaces each VM's guest-agent NIC rows with the freshly reported ones, in one transaction. */
    private void saveVmGuestAgentNetworkDevices() {
        if (!vmGuestAgentNics.isEmpty()) {
            TransactionSupport.executeInScope(TransactionScopeOption.Required,
                    new TransactionMethod<Void>() {
                        @Override
                        public Void runInTransaction() {
                            for (Guid vmId : vmGuestAgentNics.keySet()) {
                                getDbFacade().getVmGuestAgentInterfaceDao().removeAllForVm(vmId);
                            }

                            for (List<VmGuestAgentInterface> nics : vmGuestAgentNics.values()) {
                                if (nics != null) {
                                    for (VmGuestAgentInterface nic : nics) {
                                        getDbFacade().getVmGuestAgentInterfaceDao().save(nic);
                                    }
                                }
                            }
                            return null;
                        }
                    }
            );
        }
    }

    /** Persists device updates, then removals and additions, each in its own transaction. */
    private void saveVmDevicesToDb() {
        getDbFacade().getVmDeviceDao().updateAllInBatch(vmDeviceToSave);

        if (!removedDeviceIds.isEmpty()) {
            TransactionSupport.executeInScope(TransactionScopeOption.Required,
                    new TransactionMethod<Void>() {
                        @Override
                        public Void runInTransaction() {
                            getDbFacade().getVmDeviceDao().removeAll(removedDeviceIds);
                            return null;
                        }
                    });
        }

        if (!newVmDevices.isEmpty()) {
            TransactionSupport.executeInScope(TransactionScopeOption.Required,
                    new TransactionMethod<Void>() {
                        @Override
                        public Void runInTransaction() {
                            getDbFacade().getVmDeviceDao().saveAll(newVmDevices);
                            return null;
                        }
                    });
        }
    }

    /** Persists VM job updates and removes finished jobs in a transaction. */
    private void saveVmJobsToDb() {
        getDbFacade().getVmJobDao().updateAllInBatch(vmJobsToUpdate.values());

        if (!vmJobIdsToRemove.isEmpty()) {
            TransactionSupport.executeInScope(TransactionScopeOption.Required,
                    new TransactionMethod<Void>() {
                        @Override
                        public Void runInTransaction() {
                            getDbFacade().getVmJobDao().removeAll(vmJobIdsToRemove);
                            return null;
                        }
                    });
        }
    }

    /** Reloads the set of VM job IDs currently present in the DB. */
    private void refreshExistingVmJobList() {
        existingVmJobIds.clear();
        existingVmJobIds.addAll(getDbFacade().getVmJobDao().getAllIds());
    }

    /**
     * Update the given list of VMs properties in DB
     *
     * @param vmsToUpdate IDs (as strings) of the VMs whose devices should be refreshed
     */
    protected void updateVmDevices(List<String> vmsToUpdate) {
        if (vmsToUpdate.isEmpty()) {
            return;
        }
        Map[] vms = getVmInfo(vmsToUpdate);
        if (vms != null) {
            for (Map vm : vms) {
                processVmDevices(vm);
            }
        }
    }

    /**
     * Actually process the VM device update in DB.
     *
     * @param vm the VDSM full-list structure of a single VM
     */
    protected void processVmDevices(Map vm) {
        if (vm == null || vm.get(VdsProperties.vm_guid) == null) {
            log.error("Received NULL VM or VM id when processing VM devices, abort.");
            return;
        }

        Guid vmId = new Guid((String) vm.get(VdsProperties.vm_guid));
        Set<Guid> processedDevices = new HashSet<>();
        List<VmDevice> devices = getDbFacade().getVmDeviceDao().getVmDeviceByVmId(vmId);
        Map<VmDeviceId, VmDevice> deviceMap = Entities.businessEntitiesById(devices);

        for (Object o : (Object[]) vm.get(VdsProperties.Devices)) {
            Map device = (Map<String, Object>) o;
            if (device.get(VdsProperties.Address) == null) {
                // Devices without an address cannot be correlated; log and skip.
                logDeviceInformation(vmId, device);
                continue;
            }

            Guid deviceId = getDeviceId(device);
            VmDevice vmDevice = deviceMap.get(new VmDeviceId(deviceId, vmId));
            if (vmDevice == null) {
                // Unmanaged-before-upgrade devices (e.g. virtio-serial) may only match by type.
                vmDevice = getByDeviceType((String) device.get(VdsProperties.Device), deviceMap);
                deviceId = vmDevice != null ? vmDevice.getDeviceId() : deviceId;
            }

            String logicalName = null;
            if (deviceId != null && FeatureSupported.reportedDisksLogicalNames(getVdsManager().getGroupCompatibilityVersion()) &&
                    VmDeviceType.DISK.getName().equals(device.get(VdsProperties.Device))) {
                try {
                    logicalName = getDeviceLogicalName((Map<?, ?>) vm.get(VdsProperties.GuestDiskMapping), deviceId);
                } catch (Exception e) {
                    // Logical name is informational only - keep processing the device without it.
                    log.error("error while getting device name when processing, vm '{}', device info '{}' with exception, skipping '{}'",
                            vmId, device, e.getMessage());
                    log.error("Exception", e);
                }
            }

            if (deviceId == null || vmDevice == null) {
                deviceId = addNewVmDevice(vmId, device, logicalName);
            } else {
                vmDevice.setIsPlugged(Boolean.TRUE);
                vmDevice.setAddress(((Map<String, String>) device.get(VdsProperties.Address)).toString());
                vmDevice.setAlias(StringUtils.defaultString((String) device.get(VdsProperties.Alias)));
                vmDevice.setLogicalName(logicalName);
                addVmDeviceToList(vmDevice);
            }
            processedDevices.add(deviceId);
        }
        handleRemovedDevices(vmId, processedDevices, devices);
    }

    /**
     * Some of the devices need special treatment:
     * virtio-serial: this device was unmanaged before 3.6 and since 3.6 it is managed.
     * if the VM is running while the engine is upgraded we might still get it as unmanaged
     * from VDSM and since we generate IDs for unmanaged devices, we won't be able to find
     * it by its ID. therefore, we check by its type, assuming that there is only one
     * virtio-serial per VM.
     *
     */
    private VmDevice getByDeviceType(String deviceTypeName, Map<?, VmDevice> dbDevices) {
        if (VmDeviceType.VIRTIOSERIAL.getName().equals(deviceTypeName)) {
            return VmDeviceCommonUtils.findVmDeviceByType(dbDevices, deviceTypeName);
        }

        return null;
    }

    /**
     * Resolves a disk's logical name inside the guest by matching the (truncated)
     * device id against the serials in VDSM's guest disk mapping.
     */
    private String getDeviceLogicalName(Map<?, ?> diskMapping, Guid deviceId) {
        if (diskMapping == null) {
            return null;
        }

        Map<?, ?> deviceMapping = null;
        // The guest-reported serial holds only the first 20 chars of the device id.
        String modifiedDeviceId = deviceId.toString().substring(0, 20);

        for (Map.Entry<?, ?> entry : diskMapping.entrySet()) {
            String serial = (String) entry.getKey();
            if (serial != null && serial.contains(modifiedDeviceId)) {
                deviceMapping = (Map<?, ?>) entry.getValue();
                break;
            }
        }

        return deviceMapping == null ? null : (String) deviceMapping.get(VdsProperties.Name);
    }

    /**
     * Removes unmanaged devices from DB if were removed by libvirt. Empties device address with isPlugged = false
     *
     * @param vmId the VM whose devices are being reconciled
     * @param processedDevices the device IDs VDSM reported in this cycle
     */
    private void handleRemovedDevices(Guid vmId, Set<Guid> processedDevices, List<VmDevice> devices) {
        for (VmDevice device : devices) {
            if (processedDevices.contains(device.getDeviceId())) {
                continue;
            }

            if (deviceWithoutAddress(device)) {
                continue;
            }

            if (device.getIsManaged()) {
                if (device.getIsPlugged()) {
                    device.setIsPlugged(Boolean.FALSE);
                    device.setAddress("");
                    addVmDeviceToList(device);
                    log.debug("VM '{}' managed pluggable device was unplugged : '{}'", vmId, device);
                } else if (!devicePluggable(device)) {
                    log.error("VM '{}' managed non pluggable device was removed unexpectedly from libvirt: '{}'",
                            vmId, device);
                }
            } else {
                removedDeviceIds.add(device.getId());
                // Fixed SLF4J placeholder: '{1}' is MessageFormat syntax and was never substituted.
                log.debug("VM '{}' unmanaged device was marked for remove : '{}'", vmId, device);
            }
        }
    }

    private boolean devicePluggable(VmDevice device) {
        return VmDeviceCommonUtils.isDisk(device) || VmDeviceCommonUtils.isBridge(device);
    }

    /**
     * Libvirt gives no address to some special devices, and we know it.
     */
    private boolean deviceWithoutAddress(VmDevice device) {
        return VmDeviceCommonUtils.isGraphics(device);
    }

    /**
     * Adds new devices recognized by libvirt
     *
     * @param vmId the VM the device belongs to
     * @param device the VDSM device structure
     * @return the generated device id, or {@code Guid.Empty} if the device was skipped
     */
    private Guid addNewVmDevice(Guid vmId, Map device, String logicalName) {
        Guid newDeviceId = Guid.Empty;
        String typeName = (String) device.get(VdsProperties.Type);
        String deviceName = (String) device.get(VdsProperties.Device);

        // do not allow null or empty device or type values
        if (StringUtils.isEmpty(typeName) || StringUtils.isEmpty(deviceName)) {
            log.error("Empty or NULL values were passed for a VM '{}' device, Device is skipped", vmId);
        } else {
            String address = device.get(VdsProperties.Address).toString();
            String alias = StringUtils.defaultString((String) device.get(VdsProperties.Alias));
            Object o = device.get(VdsProperties.SpecParams);
            newDeviceId = Guid.newGuid();
            VmDeviceId id = new VmDeviceId(newDeviceId, vmId);
            VmDevice newDevice = new VmDevice(id, VmDeviceGeneralType.forValue(typeName), deviceName, address,
                    0,
                    o == null ? new HashMap<String, Object>() : (Map<String, Object>) o,
                    false,
                    true,
                    // Boolean.getBoolean() looks up a *system property* named by the string;
                    // parseBoolean() is what is meant here - parse the reported value itself.
                    Boolean.parseBoolean((String) device.get(VdsProperties.ReadOnly)),
                    alias,
                    null,
                    null,
                    logicalName);
            newVmDevices.add(newDevice);
            log.debug("New device was marked for adding to VM '{}' Devices : '{}'", vmId, newDevice);
        }

        return newDeviceId;
    }

    /**
     * gets the device id from the structure returned by VDSM device ids are stored in specParams map
     *
     * @param device the VDSM device structure
     * @return the device id, or null when VDSM did not report one
     */
    private static Guid getDeviceId(Map device) {
        String deviceId = (String) device.get(VdsProperties.DeviceId);
        return deviceId == null ? null : new Guid(deviceId);
    }

    /**
     * gets VM full information for the given list of VMs
     *
     * @param vmsToUpdate IDs (as strings) of the VMs to query
     * @return the VDSM full-list result, or an empty array on failure
     */
    protected Map[] getVmInfo(List<String> vmsToUpdate) {
        // TODO refactor commands to use vdsId only - the whole vds object here is useless
        VDS vds = new VDS();
        vds.setId(vdsManager.getVdsId());
        Map[] result = {};
        VDSReturnValue vdsReturnValue = getResourceManager().runVdsCommand(VDSCommandType.FullList,
                new FullListVDSCommandParameters(vds, vmsToUpdate));
        if (vdsReturnValue.getSucceeded()) {
            result = (Map[]) (vdsReturnValue.getReturnValue());
        }
        return result;
    }

    // Floppy devices legitimately arrive without an address; don't spam details for them.
    private boolean shouldLogDeviceDetails(String deviceType) {
        return !StringUtils.equalsIgnoreCase(deviceType, VmDeviceType.FLOPPY.getName());
    }

    private void logDeviceInformation(Guid vmId, Map device) {
        String message = "Received a {} Device without an address when processing VM {} devices, skipping device";
        String deviceType = (String) device.get(VdsProperties.Device);

        if (shouldLogDeviceDetails(deviceType)) {
            Map<String, Object> deviceInfo = device;
            log.info(message + ": {}", StringUtils.defaultString(deviceType), vmId, deviceInfo);
        } else {
            log.info(message, StringUtils.defaultString(deviceType), vmId);
        }
    }

    // Prefers the DB VM's id; falls back to the VDSM-reported id; null when neither side exists.
    private Guid getVmId(Pair<VM, VmInternalData> pair) {
        return (pair.getFirst() != null) ?
                pair.getFirst().getId() :
                ((pair.getSecond() != null) ? pair.getSecond().getVmDynamic().getId() : null);
    }

    /**
     * Add or update vmDynamic to save list
     *
     * @param vmDynamic the dynamic data to persist at the end of the cycle
     */
    protected void addVmDynamicToList(VmDynamic vmDynamic) {
        vmDynamicToSave.put(vmDynamic.getId(), vmDynamic);
    }

    /**
     * Add or update vmStatistics to save list
     *
     * @param vmStatistics the statistics to persist at the end of the cycle
     */
    protected void addVmStatisticsToList(VmStatistics vmStatistics) {
        vmStatisticsToSave.add(vmStatistics);
    }

    protected void addVmInterfaceStatisticsToList(List<VmNetworkInterface> list) {
        if (list.isEmpty()) {
            return;
        }
        vmInterfaceStatisticsToSave.add(list);
    }

    /**
     * Add or update vmDevice to save list
     *
     * @param vmDevice the device to persist at the end of the cycle
     */
    private void addVmDeviceToList(VmDevice vmDevice) {
        vmDeviceToSave.add(vmDevice);
    }

    /**
     * An access method for test usages
     *
     * @return The devices to be added to the database
     */
    protected List<VmDevice> getNewVmDevices() {
        return Collections.unmodifiableList(newVmDevices);
    }

    /**
     * An access method for test usages
     *
     * @return The devices to be removed from the database
     */
    protected List<VmDeviceId> getRemovedVmDevices() {
        return Collections.unmodifiableList(removedDeviceIds);
    }

    /**
     * An access method for test usages
     *
     * @return The LUNs to update in DB
     */
    protected List<LUNs> getVmLunDisksToSave() {
        return vmLunDisksToSave;
    }

    protected List<VmDynamic> getPoweringUpVms() {
        return poweringUpVms;
    }

    protected DbFacade getDbFacade() {
        return DbFacade.getInstance();
    }

    protected ResourceManager getResourceManager() {
        return ResourceManager.getInstance();
    }

    protected IVdsEventListener getVdsEventListener() {
        return ResourceManager.getInstance().getEventListener();
    }

    public void addDiskImageDynamicToSave(Pair<Guid, DiskImageDynamic> imageDynamicByVmId) {
        vmDiskImageDynamicToSave.add(imageDynamicByVmId);
    }

    public List<Guid> getExistingVmJobIds() {
        return existingVmJobIds;
    }

    public Map<Guid, VmJob> getVmJobsToUpdate() {
        return vmJobsToUpdate;
    }

    public List<Guid> getVmJobIdsToRemove() {
        return vmJobIdsToRemove;
    }

    public VdsManager getVdsManager() {
        return vdsManager;
    }

    public void addVmGuestAgentNics(Guid id, List<VmGuestAgentInterface> vmGuestAgentInterfaces) {
        vmGuestAgentNics.put(id, vmGuestAgentInterfaces);
    }

    // Visible for testing
    protected List<VmStatic> getExternalVmsToAdd() {
        return externalVmsToAdd;
    }
}
|
package org.webrtc;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import android.content.Context;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
// Android specific implementation of VideoCapturer.
// VideoCapturerAndroid.create();
// This class extends VideoCapturer with a method to easily switch between the
// front and back camera. It also provides methods for enumerating valid device
// names.
// Threading notes: this class is called from C++ code, Android Camera callbacks, and possibly
// arbitrary Java threads. All public entry points are thread safe, and delegate the work to the
// camera thread. The internal *OnCameraThread() methods must check |camera| for null to check if
// the camera has been stopped.
// TODO(magjed): This class name is now confusing - rename to Camera1VideoCapturer.
@SuppressWarnings("deprecation")
public class VideoCapturerAndroid implements
    CameraVideoCapturer,
    android.hardware.Camera.PreviewCallback,
    SurfaceTextureHelper.OnTextureFrameAvailableListener {
  private final static String TAG = "VideoCapturerAndroid";
  private static final int CAMERA_STOP_TIMEOUT_MS = 7000;

  private android.hardware.Camera camera;  // Only non-null while capturing.
  private final Object handlerLock = new Object();
  // |cameraThreadHandler| must be synchronized on |handlerLock| when not on the camera thread,
  // or when modifying the reference. Use maybePostOnCameraThread() instead of posting directly to
  // the handler - this way all callbacks with a specified token can be removed at once.
  private Handler cameraThreadHandler;
  private Context applicationContext;
  // Synchronization lock for |id|.
  private final Object cameraIdLock = new Object();
  private int id;
  private android.hardware.Camera.CameraInfo info;
  private CameraStatistics cameraStatistics;
  // Remember the requested format in case we want to switch cameras.
  private int requestedWidth;
  private int requestedHeight;
  private int requestedFramerate;
  // The capture format will be the closest supported format to the requested format.
  private CaptureFormat captureFormat;
  private final Object pendingCameraSwitchLock = new Object();
  private volatile boolean pendingCameraSwitch;
  private CapturerObserver frameObserver = null;
  private final CameraEventsHandler eventsHandler;
  private boolean firstFrameReported;
  // Arbitrary queue depth. Higher number means more memory allocated & held,
  // lower number means more sensitivity to processing time in the client (and
  // potentially stalling the capturer if it runs out of buffers to write to).
  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
  private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
  private final boolean isCapturingToTexture;
  private SurfaceTextureHelper surfaceHelper;
  private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
  private final static int OPEN_CAMERA_DELAY_MS = 500;
  private int openCameraAttempts;

  // Camera error callback. Reports driver-level errors to the events handler, if set.
  private final android.hardware.Camera.ErrorCallback cameraErrorCallback =
      new android.hardware.Camera.ErrorCallback() {
    @Override
    public void onError(int error, android.hardware.Camera camera) {
      String errorMessage;
      if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
        errorMessage = "Camera server died!";
      } else {
        errorMessage = "Camera error: " + error;
      }
      Logging.e(TAG, errorMessage);
      if (eventsHandler != null) {
        eventsHandler.onCameraError(errorMessage);
      }
    }
  };

  // Creates a capturer for the camera with the given |name|, delivering frames as
  // byte buffers (not textures). Returns null on failure (see deprecated overload).
  public static VideoCapturerAndroid create(String name,
      CameraEventsHandler eventsHandler) {
    return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
  }

  // Use ctor directly instead.
  @Deprecated
  public static VideoCapturerAndroid create(String name,
      CameraEventsHandler eventsHandler, boolean captureToTexture) {
    try {
      return new VideoCapturerAndroid(name, eventsHandler, captureToTexture);
    } catch (RuntimeException e) {
      Logging.e(TAG, "Couldn't create camera.", e);
      return null;
    }
  }

  // Logs the camera thread's current stack trace; used for debugging e.g. stop timeouts.
  public void printStackTrace() {
    Thread cameraThread = null;
    synchronized (handlerLock) {
      if (cameraThreadHandler != null) {
        cameraThread = cameraThreadHandler.getLooper().getThread();
      }
    }
    if (cameraThread != null) {
      StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
      if (cameraStackTraces.length > 0) {
        Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
        for (StackTraceElement stackTrace : cameraStackTraces) {
          Logging.d(TAG, stackTrace.toString());
        }
      }
    }
  }

  // Switch camera to the next valid camera id. This can only be called while
  // the camera is running.
  @Override
  public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
    if (android.hardware.Camera.getNumberOfCameras() < 2) {
      if (switchEventsHandler != null) {
        switchEventsHandler.onCameraSwitchError("No camera to switch to.");
      }
      return;
    }
    synchronized (pendingCameraSwitchLock) {
      if (pendingCameraSwitch) {
        // Do not handle multiple camera switch request to avoid blocking
        // camera thread by handling too many switch request from a queue.
        Logging.w(TAG, "Ignoring camera switch request.");
        if (switchEventsHandler != null) {
          switchEventsHandler.onCameraSwitchError("Pending camera switch already in progress.");
        }
        return;
      }
      pendingCameraSwitch = true;
    }
    final boolean didPost = maybePostOnCameraThread(new Runnable() {
      @Override
      public void run() {
        switchCameraOnCameraThread();
        synchronized (pendingCameraSwitchLock) {
          pendingCameraSwitch = false;
        }
        if (switchEventsHandler != null) {
          switchEventsHandler.onCameraSwitchDone(
              info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
        }
      }
    });
    if (!didPost && switchEventsHandler != null) {
      switchEventsHandler.onCameraSwitchError("Camera is stopped.");
    }
  }

  // Requests a new output format from the video capturer. Captured frames
  // by the camera will be scaled/or dropped by the video capturer.
  // It does not matter if width and height are flipped. I.E, |width| = 640, |height| = 480 produce
  // the same result as |width| = 480, |height| = 640.
  // TODO(magjed/perkj): Document what this function does. Change name?
  @Override
  public void onOutputFormatRequest(final int width, final int height, final int framerate) {
    maybePostOnCameraThread(new Runnable() {
      @Override public void run() {
        onOutputFormatRequestOnCameraThread(width, height, framerate);
      }
    });
  }

  // Reconfigure the camera to capture in a new format. This should only be called while the camera
  // is running.
  @Override
  public void changeCaptureFormat(final int width, final int height, final int framerate) {
    maybePostOnCameraThread(new Runnable() {
      @Override public void run() {
        startPreviewOnCameraThread(width, height, framerate);
      }
    });
  }

  // Helper function to retrieve the current camera id synchronously. Note that the camera id might
  // change at any point by switchCamera() calls.
  private int getCurrentCameraId() {
    synchronized (cameraIdLock) {
      return id;
    }
  }

  @Override
  public List<CaptureFormat> getSupportedFormats() {
    return CameraEnumerator.getSupportedFormats(getCurrentCameraId());
  }

  // Returns true if this VideoCapturer is setup to capture video frames to a SurfaceTexture.
  public boolean isCapturingToTexture() {
    return isCapturingToTexture;
  }

  // |cameraName| empty or null selects camera 0; otherwise the camera index is looked up
  // by name. Throws RuntimeException if the device has no cameras.
  public VideoCapturerAndroid(String cameraName, CameraEventsHandler eventsHandler,
      boolean captureToTexture) {
    if (android.hardware.Camera.getNumberOfCameras() == 0) {
      throw new RuntimeException("No cameras available");
    }
    if (cameraName == null || cameraName.equals("")) {
      this.id = 0;
    } else {
      this.id = CameraEnumerationAndroid.getCameraIndex(cameraName);
    }
    this.eventsHandler = eventsHandler;
    isCapturingToTexture = captureToTexture;
    Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
  }

  // Asserts the caller is on the camera thread. If the camera is already stopped
  // (handler cleared) no check is possible, so only an error is logged.
  private void checkIsOnCameraThread() {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, "Camera is stopped - can't check thread.");
      } else if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
        throw new IllegalStateException("Wrong thread");
      }
    }
  }

  // Posts |runnable| to the camera thread, returning false if the camera is stopped.
  private boolean maybePostOnCameraThread(Runnable runnable) {
    return maybePostDelayedOnCameraThread(0 /* delayMs */, runnable);
  }

  // Like maybePostOnCameraThread(), but delayed by |delayMs|. Posts with |this| as the
  // token so all pending callbacks can be removed at once when stopping.
  private boolean maybePostDelayedOnCameraThread(int delayMs, Runnable runnable) {
    synchronized (handlerLock) {
      return cameraThreadHandler != null
          && cameraThreadHandler.postAtTime(
              runnable, this /* token */, SystemClock.uptimeMillis() + delayMs);
    }
  }

  @Override
  public void dispose() {
    Logging.d(TAG, "dispose");
  }

  // Note that this actually opens the camera, and Camera callbacks run on the
  // thread that calls open(), so this is done on the CameraThread.
  @Override
  public void startCapture(
      final int width, final int height, final int framerate,
      final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
      final CapturerObserver frameObserver) {
    Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
    if (surfaceTextureHelper == null) {
      frameObserver.onCapturerStarted(false /* success */);
      if (eventsHandler != null) {
        eventsHandler.onCameraError("No SurfaceTexture created.");
      }
      return;
    }
    if (applicationContext == null) {
      throw new IllegalArgumentException("applicationContext not set.");
    }
    if (frameObserver == null) {
      throw new IllegalArgumentException("frameObserver not set.");
    }
    synchronized (handlerLock) {
      if (this.cameraThreadHandler != null) {
        throw new RuntimeException("Camera has already been started.");
      }
      this.cameraThreadHandler = surfaceTextureHelper.getHandler();
      this.surfaceHelper = surfaceTextureHelper;
      final boolean didPost = maybePostOnCameraThread(new Runnable() {
        @Override
        public void run() {
          openCameraAttempts = 0;
          startCaptureOnCameraThread(width, height, framerate, frameObserver,
              applicationContext);
        }
      });
      if (!didPost) {
        frameObserver.onCapturerStarted(false);
        if (eventsHandler != null) {
          eventsHandler.onCameraError("Could not post task to camera thread.");
        }
      }
    }
  }

  // Opens the camera and starts the preview on the camera thread. Retries the open up to
  // MAX_OPEN_CAMERA_ATTEMPTS times with OPEN_CAMERA_DELAY_MS between attempts; any final
  // failure is reported through |frameObserver| and |eventsHandler|.
  private void startCaptureOnCameraThread(
      final int width, final int height, final int framerate, final CapturerObserver frameObserver,
      final Context applicationContext) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, "startCaptureOnCameraThread: Camera is stopped");
        return;
      } else {
        checkIsOnCameraThread();
      }
    }
    if (camera != null) {
      Logging.e(TAG, "startCaptureOnCameraThread: Camera has already been started.");
      return;
    }
    this.applicationContext = applicationContext;
    this.frameObserver = frameObserver;
    this.firstFrameReported = false;
    try {
      try {
        synchronized (cameraIdLock) {
          Logging.d(TAG, "Opening camera " + id);
          if (eventsHandler != null) {
            eventsHandler.onCameraOpening(id);
          }
          camera = android.hardware.Camera.open(id);
          info = new android.hardware.Camera.CameraInfo();
          android.hardware.Camera.getCameraInfo(id, info);
        }
      } catch (RuntimeException e) {
        openCameraAttempts++;
        if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
          Logging.e(TAG, "Camera.open failed, retrying", e);
          maybePostDelayedOnCameraThread(OPEN_CAMERA_DELAY_MS, new Runnable() {
            @Override public void run() {
              startCaptureOnCameraThread(width, height, framerate, frameObserver,
                  applicationContext);
            }
          });
          return;
        }
        throw e;
      }
      camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
      Logging.d(TAG, "Camera orientation: " + info.orientation +
          " .Device orientation: " + getDeviceOrientation());
      camera.setErrorCallback(cameraErrorCallback);
      startPreviewOnCameraThread(width, height, framerate);
      frameObserver.onCapturerStarted(true);
      if (isCapturingToTexture) {
        surfaceHelper.startListening(this);
      }
      // Start camera observer.
      cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
    } catch (IOException|RuntimeException e) {
      Logging.e(TAG, "startCapture failed", e);
      // Make sure the camera is released.
      stopCaptureOnCameraThread(true /* stopHandler */);
      frameObserver.onCapturerStarted(false);
      if (eventsHandler != null) {
        eventsHandler.onCameraError("Camera can not be started.");
      }
    }
  }

  // (Re)start preview with the closest supported format to |width| x |height| @ |framerate|.
  private void startPreviewOnCameraThread(int width, int height, int framerate) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null || camera == null) {
        Logging.e(TAG, "startPreviewOnCameraThread: Camera is stopped");
        return;
      } else {
        checkIsOnCameraThread();
      }
    }
    Logging.d(
        TAG, "startPreviewOnCameraThread requested: " + width + "x" + height + "@" + framerate);
    requestedWidth = width;
    requestedHeight = height;
    requestedFramerate = framerate;
    // Find closest supported format for |width| x |height| @ |framerate|.
    final android.hardware.Camera.Parameters parameters = camera.getParameters();
    final List<CaptureFormat.FramerateRange> supportedFramerates =
        CameraEnumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
    Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
    final CaptureFormat.FramerateRange bestFpsRange;
    if (supportedFramerates.isEmpty()) {
      Logging.w(TAG, "No supported preview fps range");
      bestFpsRange = new CaptureFormat.FramerateRange(0, 0);
    } else {
      bestFpsRange = CameraEnumerationAndroid.getClosestSupportedFramerateRange(
          supportedFramerates, framerate);
    }
    final android.hardware.Camera.Size previewSize =
        CameraEnumerationAndroid.getClosestSupportedSize(
            parameters.getSupportedPreviewSizes(), width, height);
    final CaptureFormat captureFormat = new CaptureFormat(
        previewSize.width, previewSize.height, bestFpsRange);
    // Check if we are already using this capture format, then we don't need to do anything.
    if (captureFormat.isSameFormat(this.captureFormat)) {
      return;
    }
    // Update camera parameters.
    Logging.d(TAG, "isVideoStabilizationSupported: " +
        parameters.isVideoStabilizationSupported());
    if (parameters.isVideoStabilizationSupported()) {
      parameters.setVideoStabilization(true);
    }
    // Note: setRecordingHint(true) actually decrease frame rate on N5.
    // parameters.setRecordingHint(true);
    if (captureFormat.framerate.max > 0) {
      parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
    }
    parameters.setPreviewSize(captureFormat.width, captureFormat.height);
    if (!isCapturingToTexture) {
      parameters.setPreviewFormat(captureFormat.imageFormat);
    }
    // Picture size is for taking pictures and not for preview/video, but we need to set it anyway
    // as a workaround for an aspect ratio problem on Nexus 7.
    final android.hardware.Camera.Size pictureSize =
        CameraEnumerationAndroid.getClosestSupportedSize(
            parameters.getSupportedPictureSizes(), width, height);
    parameters.setPictureSize(pictureSize.width, pictureSize.height);
    // Temporarily stop preview if it's already running.
    if (this.captureFormat != null) {
      camera.stopPreview();
      // Calling |setPreviewCallbackWithBuffer| with null should clear the internal camera buffer
      // queue, but sometimes we receive a frame with the old resolution after this call anyway.
      camera.setPreviewCallbackWithBuffer(null);
    }
    // (Re)start preview.
    Logging.d(TAG, "Start capturing: " + captureFormat);
    this.captureFormat = captureFormat;
    List<String> focusModes = parameters.getSupportedFocusModes();
    if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
      parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
    }
    camera.setParameters(parameters);
    // Calculate orientation manually and send it as CVO instead.
    camera.setDisplayOrientation(0 /* degrees */);
    if (!isCapturingToTexture) {
      queuedBuffers.clear();
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
        queuedBuffers.add(buffer.array());
        camera.addCallbackBuffer(buffer.array());
      }
      camera.setPreviewCallbackWithBuffer(this);
    }
    camera.startPreview();
  }

  // Blocks until camera is known to be stopped.
  @Override
  public void stopCapture() throws InterruptedException {
    Logging.d(TAG, "stopCapture");
    final CountDownLatch barrier = new CountDownLatch(1);
    final boolean didPost = maybePostOnCameraThread(new Runnable() {
      @Override public void run() {
        stopCaptureOnCameraThread(true /* stopHandler */);
        barrier.countDown();
      }
    });
    if (!didPost) {
      Logging.e(TAG, "Calling stopCapture() for already stopped camera.");
      return;
    }
    if (!barrier.await(CAMERA_STOP_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
      Logging.e(TAG, "Camera stop timeout");
      printStackTrace();
      if (eventsHandler != null) {
        eventsHandler.onCameraError("Camera stop timeout");
      }
    }
    Logging.d(TAG, "stopCapture done");
  }

  // Stops the preview and releases the camera on the camera thread. With |stopHandler| true,
  // also clears |cameraThreadHandler| and |surfaceHelper| (full stop); with false the handler
  // is kept alive, e.g. for a camera switch.
  private void stopCaptureOnCameraThread(boolean stopHandler) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, "stopCaptureOnCameraThread: Camera is stopped");
      } else {
        checkIsOnCameraThread();
      }
    }
    Logging.d(TAG, "stopCaptureOnCameraThread");
    // Note that the camera might still not be started here if startCaptureOnCameraThread failed
    // and we posted a retry.
    // Make sure onTextureFrameAvailable() is not called anymore.
    if (surfaceHelper != null) {
      surfaceHelper.stopListening();
    }
    if (stopHandler) {
      synchronized (handlerLock) {
        // Clear the cameraThreadHandler first, in case stopPreview or
        // other driver code deadlocks. Deadlock in
        // android.hardware.Camera._stopPreview(Native Method) has
        // been observed on Nexus 5 (hammerhead), OS version LMY48I.
        // The camera might post another one or two preview frames
        // before stopped, so we have to check for a null
        // cameraThreadHandler in our handler. Remove all pending
        // Runnables posted from |this|.
        if (cameraThreadHandler != null) {
          cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
          cameraThreadHandler = null;
        }
        surfaceHelper = null;
      }
    }
    if (cameraStatistics != null) {
      cameraStatistics.release();
      cameraStatistics = null;
    }
    Logging.d(TAG, "Stop preview.");
    if (camera != null) {
      camera.stopPreview();
      camera.setPreviewCallbackWithBuffer(null);
    }
    queuedBuffers.clear();
    captureFormat = null;
    Logging.d(TAG, "Release camera.");
    if (camera != null) {
      camera.release();
      camera = null;
    }
    if (eventsHandler != null) {
      eventsHandler.onCameraClosed();
    }
    Logging.d(TAG, "stopCaptureOnCameraThread done");
  }

  // Stops the current camera (keeping the handler alive), advances |id| to the next camera,
  // and restarts capture with the previously requested format.
  private void switchCameraOnCameraThread() {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, "switchCameraOnCameraThread: Camera is stopped");
        return;
      } else {
        checkIsOnCameraThread();
      }
    }
    Logging.d(TAG, "switchCameraOnCameraThread");
    stopCaptureOnCameraThread(false /* stopHandler */);
    synchronized (cameraIdLock) {
      id = (id + 1) % android.hardware.Camera.getNumberOfCameras();
    }
    startCaptureOnCameraThread(requestedWidth, requestedHeight, requestedFramerate, frameObserver,
        applicationContext);
    Logging.d(TAG, "switchCameraOnCameraThread done");
  }

  private void onOutputFormatRequestOnCameraThread(int width, int height, int framerate) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null || camera == null) {
        Logging.e(TAG, "onOutputFormatRequestOnCameraThread: Camera is stopped");
        return;
      } else {
        checkIsOnCameraThread();
      }
    }
    Logging.d(TAG, "onOutputFormatRequestOnCameraThread: " + width + "x" + height +
        "@" + framerate);
    frameObserver.onOutputFormatRequest(width, height, framerate);
  }

  // Exposed for testing purposes only.
  Handler getCameraThreadHandler() {
    return cameraThreadHandler;
  }

  // Returns the device's current rotation in degrees (0, 90, 180 or 270) based on the
  // default display.
  private int getDeviceOrientation() {
    int orientation = 0;
    WindowManager wm = (WindowManager) applicationContext.getSystemService(
        Context.WINDOW_SERVICE);
    switch(wm.getDefaultDisplay().getRotation()) {
      case Surface.ROTATION_90:
        orientation = 90;
        break;
      case Surface.ROTATION_180:
        orientation = 180;
        break;
      case Surface.ROTATION_270:
        orientation = 270;
        break;
      case Surface.ROTATION_0:
      default:
        orientation = 0;
        break;
    }
    return orientation;
  }

  // Combines the camera sensor orientation with the device rotation into the rotation to
  // apply to captured frames; back-facing cameras rotate in the opposite direction.
  private int getFrameOrientation() {
    int rotation = getDeviceOrientation();
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
      rotation = 360 - rotation;
    }
    return (info.orientation + rotation) % 360;
  }

  // Called on cameraThread so must not "synchronized".
  @Override
  public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, "onPreviewFrame: Camera is stopped");
        return;
      } else {
        checkIsOnCameraThread();
      }
    }
    if (!queuedBuffers.contains(data)) {
      // |data| is an old invalid buffer.
      return;
    }
    if (camera != callbackCamera) {
      throw new RuntimeException("Unexpected camera in callback!");
    }
    final long captureTimeNs =
        TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
    if (eventsHandler != null && !firstFrameReported) {
      eventsHandler.onFirstFrameAvailable();
      firstFrameReported = true;
    }
    cameraStatistics.addFrame();
    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
        getFrameOrientation(), captureTimeNs);
    // Return the buffer to the camera so it can be reused for a later frame.
    camera.addCallbackBuffer(data);
  }

  // Texture-frame delivery path (used when |isCapturingToTexture|). Front-facing frames are
  // un-mirrored via the transform matrix before being forwarded to the observer.
  @Override
  public void onTextureFrameAvailable(
      int oesTextureId, float[] transformMatrix, long timestampNs) {
    synchronized (handlerLock) {
      if (cameraThreadHandler == null) {
        Logging.e(TAG, "onTextureFrameAvailable: Camera is stopped");
        surfaceHelper.returnTextureFrame();
        return;
      } else {
        checkIsOnCameraThread();
      }
    }
    if (eventsHandler != null && !firstFrameReported) {
      eventsHandler.onFirstFrameAvailable();
      firstFrameReported = true;
    }
    int rotation = getFrameOrientation();
    if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
      // Undo the mirror that the OS "helps" us with.
      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
      transformMatrix =
          RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
    }
    cameraStatistics.addFrame();
    frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
        transformMatrix, rotation, timestampNs);
  }
}
|
package tinyImage.designer;
import java.awt.GridLayout;
import java.io.IOException;
import javax.swing.JPanel;
import tinyImage.Timg;
@SuppressWarnings("serial")
public class TimgDesignerPanel extends JPanel {
    // Self-reference captured by the per-pixel callbacks built in resizeCanvas().
    volatile TimgDesignerPanel designer = this;
    public volatile boolean mousedown = false;
    Palette palette = new Palette(this);
    // Default image: 10x10 (dimensions are stored offset by Short.MIN_VALUE).
    Timg img = new Timg((short) (10 + Short.MIN_VALUE), (short) (10 + Short.MIN_VALUE));

    public TimgDesignerPanel() {
        resizeCanvas();
    }

    /** Repaints the canvas by repainting the parent container. */
    public void repaintCanvas() {
        this.getParent().repaint();
        //TODO?
    }

    /**
     * Rebuilds the pixel grid so it matches the current image dimensions:
     * one ScaleablePixelBox per pixel, laid out in a height x width grid.
     */
    private void resizeCanvas() {
        this.removeAll();
        this.setLayout(new GridLayout(this.img.getHeight(), this.img.getWidth()));
        System.out.println(this.img.getHeight() + " " + this.img.getWidth());
        int size = this.img.getHeight() * this.img.getWidth();
        for (int i = 0; i < size; ++i) {
            final int ci = i;
            // On activation: paint the component and record the color in the image.
            // NOTE(review): the UI uses palette.getSelectedColor() while the image data
            // uses palette.currentcolor — confirm these always agree.
            this.add(new ScaleablePixelBox(new Thread(new Runnable() {
                public void run() {
                    designer.getComponent(ci).setBackground(palette.getSelectedColor());
                    img.setColor(ci, palette.currentcolor); // fixed: stray ";;" removed
                }
            }), this.img.getColor(i), this));
        }
        this.revalidate();
    }

    /**
     * Replaces the image with a new blank one of the given dimensions and rebuilds the grid.
     *
     * @param wh two-element array: {width, height} (in the offset short encoding)
     */
    public void newImg(short[] wh) {
        this.img = new Timg(wh[0], wh[1]);
        // Fixed: removed no-op "Thread.currentThread();" whose result was discarded.
        // NOTE(review): Thread.yield() is only a scheduler hint with no guaranteed
        // effect — confirm whether it is needed here at all.
        Thread.yield();
        this.resizeCanvas();
    }

    /** @return the raw image bytes. */
    public byte[] getData() {
        return this.img.getData();
    }

    /**
     * Loads raw image bytes and refreshes the grid and display.
     *
     * @param bytes serialized image data
     * @throws IOException if the data cannot be parsed by Timg
     */
    public void setData(byte[] bytes) throws IOException {
        this.img.setData(bytes);
        this.resizeCanvas();
        this.repaintCanvas();
    }
}
|
package CollisionDetector;
import akka.actor.AbstractActor;
import akka.actor.ActorRef;
import akka.actor.Props;
import akka.japi.pf.ReceiveBuilder;
import droneapi.api.DroneCommander;
import droneapi.model.properties.Location;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static java.lang.Math.abs;
/**
 * Actor that watches the locations of a set of drones and latches a boolean flag when any
 * two airborne drones come within collision range of each other. On a stop message the flag
 * is reported to {@code reporterRef} and the actor stops itself.
 */
public class CollisionDetector extends AbstractActor {
    //Minimum distance between 2 drones.
    private static final double MIN_DISTANCE = 5;
    //Minimum height of a drone to be able to collide
    private static final double MIN_COLLIDE_HEIGHT = 0.5;
    //Minimum height between 2 drones on the same location
    private static final double MIN_HEIGHT = 0.5;

    // Last known location per drone; the value stays null until the first location update.
    private Map<DroneCommander, Location> drones = new HashMap<>();
    private List<ActorRef> senders = new ArrayList<>();
    private ActorRef reporterRef;
    private boolean collision = false; // fixed typo: was "collison"

    public CollisionDetector(List<DroneCommander> droneCommanders, ActorRef reporterRef) {
        this.reporterRef = reporterRef;
        for (DroneCommander dc : droneCommanders) {
            drones.put(dc, null);
            senders.add(getContext().system().actorOf(Props.create(LocationChangedMessageSender.class,
                    () -> new LocationChangedMessageSender(dc, self()))));
        }
        receive(ReceiveBuilder.
                match(CollisionDetectorStopMessage.class, s -> collisionDetectorStopMessage(s)).
                match(CollisionLocationChangedMessage.class, s -> collisionLocationChangedMessage(s)).build()
        );
    }

    /** Forwards the stop message to all senders, reports the result, and stops this actor. */
    private void collisionDetectorStopMessage(CollisionDetectorStopMessage s) {
        for (ActorRef lcms : senders) {
            lcms.tell(s, sender());
        }
        reporterRef.tell(collision, self());
        getContext().stop(self());
    }

    /** Records the new location and checks it against every other drone's last location. */
    private void collisionLocationChangedMessage(CollisionLocationChangedMessage m) {
        if (collision) {
            // Already detected; the flag latches until reported.
            return;
        }
        Location droneLocation = m.getLocation();
        drones.put(m.getDroneCommander(), droneLocation);
        //Check if drone collide with other drone that is flying
        for (DroneCommander otherDrone : drones.keySet()) {
            if (otherDrone == m.getDroneCommander()) {
                continue;
            }
            Location otherDroneLocation = drones.get(otherDrone);
            // BUG FIX: the original tested "otherDrone == null" — a map key, which is never
            // null — so a drone with no location yet caused an NPE on getHeight(). The
            // null-check must be on the looked-up location.
            if (otherDroneLocation == null
                    || otherDroneLocation.getHeight() < MIN_COLLIDE_HEIGHT
                    || droneLocation.getHeight() < MIN_COLLIDE_HEIGHT) {
                continue;
            }
            //Check if collide
            if (otherDroneLocation.distance(droneLocation) < MIN_DISTANCE
                    && abs(otherDroneLocation.getHeight() - droneLocation.getHeight()) < MIN_HEIGHT) {
                collision = true;
                return;
            }
        }
    }
}
|
package com.couchbase.lite.replicator;
import com.couchbase.lite.CouchbaseLiteException;
import com.couchbase.lite.Database;
import com.couchbase.lite.Document;
import com.couchbase.lite.DocumentChange;
import com.couchbase.lite.Emitter;
import com.couchbase.lite.LiteTestCase;
import com.couchbase.lite.LiveQuery;
import com.couchbase.lite.Manager;
import com.couchbase.lite.Mapper;
import com.couchbase.lite.Query;
import com.couchbase.lite.QueryEnumerator;
import com.couchbase.lite.QueryOptions;
import com.couchbase.lite.QueryRow;
import com.couchbase.lite.Revision;
import com.couchbase.lite.SavedRevision;
import com.couchbase.lite.UnsavedRevision;
import com.couchbase.lite.ValidationContext;
import com.couchbase.lite.Validator;
import com.couchbase.lite.View;
import com.couchbase.lite.auth.Authenticator;
import com.couchbase.lite.auth.AuthenticatorFactory;
import com.couchbase.lite.auth.FacebookAuthorizer;
import com.couchbase.lite.internal.RevisionInternal;
import com.couchbase.lite.mockserver.MockBulkDocs;
import com.couchbase.lite.mockserver.MockChangesFeed;
import com.couchbase.lite.mockserver.MockChangesFeedNoResponse;
import com.couchbase.lite.mockserver.MockCheckpointGet;
import com.couchbase.lite.mockserver.MockCheckpointPut;
import com.couchbase.lite.mockserver.MockDispatcher;
import com.couchbase.lite.mockserver.MockDocumentBulkGet;
import com.couchbase.lite.mockserver.MockDocumentGet;
import com.couchbase.lite.mockserver.MockDocumentPut;
import com.couchbase.lite.mockserver.MockFacebookAuthPost;
import com.couchbase.lite.mockserver.MockHelper;
import com.couchbase.lite.mockserver.MockRevsDiff;
import com.couchbase.lite.mockserver.MockSessionGet;
import com.couchbase.lite.mockserver.SmartMockResponse;
import com.couchbase.lite.mockserver.WrappedSmartMockResponse;
import com.couchbase.lite.support.HttpClientFactory;
import com.couchbase.lite.support.RemoteRequestRetry;
import com.couchbase.lite.util.Log;
import com.squareup.okhttp.mockwebserver.MockResponse;
import com.squareup.okhttp.mockwebserver.MockWebServer;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import junit.framework.Assert;
import org.apache.http.HttpEntity;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.client.CookieStore;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.cookie.Cookie;
import org.apache.http.entity.mime.MultipartEntity;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Tests for the new state machine based replicator
*/
public class ReplicationTest extends LiteTestCase {
/**
 * Continuous puller starts offline
 * Wait for a while .. (til what?)
 * Add remote document (simulate w/ mock webserver)
 * Put replication online
 * Make sure doc is pulled
 */
public void testGoOnlinePuller() throws Exception {
    // create mock server
    MockDispatcher dispatcher = new MockDispatcher();
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    MockWebServer server = new MockWebServer();
    server.setDispatcher(dispatcher);
    server.play();
    // mock documents to be pulled
    MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
    mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
    // checkpoint PUT or GET response (sticky)
    MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
    mockCheckpointPut.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
    // _changes response 503 error (sticky) -- keeps the puller effectively "offline" at first
    WrappedSmartMockResponse wrapped2 = new WrappedSmartMockResponse(new MockResponse().setResponseCode(503));
    wrapped2.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, wrapped2);
    // doc1 response
    MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
    dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
    // _revs_diff response -- everything missing
    MockRevsDiff mockRevsDiff = new MockRevsDiff();
    mockRevsDiff.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
    // _bulk_docs response -- everything stored
    MockBulkDocs mockBulkDocs = new MockBulkDocs();
    mockBulkDocs.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
    // create and start replication
    Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
    pullReplication.setContinuous(true);
    pullReplication.start();
    // wait until a _checkpoint request have been sent
    dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
    putReplicationOffline(pullReplication);
    // clear out existing queued mock responses to make room for new ones
    dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_CHANGES);
    // real _changes response with doc1
    MockChangesFeed mockChangesFeed = new MockChangesFeed();
    mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
    // long poll changes feed no response -- parks subsequent _changes long-polls
    MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
    mockChangesFeedNoResponse.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse);
    putReplicationOnline(pullReplication);
    // the checkpoint advancing to doc1's sequence proves the doc was pulled
    waitForPutCheckpointRequestWithSeq(dispatcher, mockDoc1.getDocSeq());
    stopReplication(pullReplication);
    server.shutdown();
}
/**
* Start continuous replication with a closed db.
*
* Expected behavior:
* - Receive replication finished callback
* - Replication lastError will contain an exception
*/
public void testStartReplicationClosedDb() throws Exception {
    Database db = this.manager.getDatabase("closed");
    final CountDownLatch countDownLatch = new CountDownLatch(1);
    final Replication replication = db.createPullReplication(new URL("http://fake.com/foo"));
    replication.setContinuous(true);
    replication.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            Log.d(TAG, "changed event: %s", event);
            // once the replicator reports it is no longer running, the
            // "finished" condition this test waits for has been met
            if (!replication.isRunning()) {
                countDownLatch.countDown();
            }
        }
    });
    // close the db BEFORE starting: start() is expected to fail fast and
    // surface the failure via getLastError() rather than throwing
    db.close();
    replication.start();
    boolean success = countDownLatch.await(60, TimeUnit.SECONDS);
    assertTrue(success);
    // the replication must record an error caused by the closed database
    assertNotNull(replication.getLastError());
}
/**
* Start a replication and stop it immediately
*/
public void failingTestStartReplicationStartStop() throws Exception {
    final CountDownLatch countDownLatch = new CountDownLatch(1);
    final List<ReplicationStateTransition> transitions = new ArrayList<ReplicationStateTransition>();
    final Replication replication = database.createPullReplication(new URL("http://fake.com/foo"));
    replication.setContinuous(true);
    // BUG FIX: the transitions list was previously never populated by any
    // listener, so the assertEquals(3, transitions.size()) below could never
    // pass. Record each state transition as it is reported.
    replication.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            // non-transition change events carry a null transition
            if (event.getTransition() != null) {
                transitions.add(event.getTransition());
            }
        }
    });
    replication.addChangeListener(new ReplicationFinishedObserver(countDownLatch));
    replication.start();
    replication.start(); // duplicate start -- this should be ignored
    replication.stop();
    replication.stop(); // duplicate stop -- this should be ignored
    boolean success = countDownLatch.await(60, TimeUnit.SECONDS);
    assertTrue(success);
    assertNull(replication.getLastError());
    // expected state sequence: INITIAL -> RUNNING -> STOPPING -> STOPPED
    assertEquals(3, transitions.size());
    assertEquals(ReplicationState.INITIAL, transitions.get(0).getSource());
    assertEquals(ReplicationState.RUNNING, transitions.get(0).getDestination());
    assertEquals(ReplicationState.RUNNING, transitions.get(1).getSource());
    assertEquals(ReplicationState.STOPPING, transitions.get(1).getDestination());
    assertEquals(ReplicationState.STOPPING, transitions.get(2).getSource());
    assertEquals(ReplicationState.STOPPED, transitions.get(2).getDestination());
}
/**
* Pull replication test:
*
* - Single one-shot pull replication
* - Against simulated sync gateway
* - Remote docs do not have attachments
*/
public void testMockSinglePullSyncGw() throws Exception {
    // single one-shot pull from a simulated Sync Gateway; remote docs carry no attachments
    mockSinglePull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.SYNC_GW, /* addAttachments */ false);
}
/**
* Pull replication test:
*
* - Single one-shot pull replication
* - Against simulated couchdb
* - Remote docs do not have attachments
*/
public void testMockSinglePullCouchDb() throws Exception {
    // single one-shot pull from a simulated CouchDB; remote docs carry no attachments
    mockSinglePull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.COUCHDB, /* addAttachments */ false);
}
/**
* Pull replication test:
*
* - Single one-shot pull replication
* - Against simulated couchdb
* - Remote docs have attachments
*/
public void testMockSinglePullCouchDbAttachments() throws Exception {
    // single one-shot pull from a simulated CouchDB; remote docs have attachments
    mockSinglePull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.COUCHDB, /* addAttachments */ true);
}
/**
* Pull replication test:
*
* - Single one-shot pull replication
* - Against simulated sync gateway
* - Remote docs have attachments
*
* TODO: sporadic assertion failure when checking rev field of PUT checkpoint requests
*
*/
public void testMockSinglePullSyncGwAttachments() throws Exception {
    // single one-shot pull from a simulated Sync Gateway; remote docs have attachments
    mockSinglePull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.SYNC_GW, /* addAttachments */ true);
}
public void testMockMultiplePullSyncGw() throws Exception {
    // two sequential pulls (initial + updated doc) against a simulated Sync Gateway
    mockMultiplePull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.SYNC_GW);
}
public void testMockMultiplePullCouchDb() throws Exception {
    // two sequential pulls (initial + updated doc) against a simulated CouchDB
    mockMultiplePull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.COUCHDB);
}
public void testMockContinuousPullCouchDb() throws Exception {
    // continuous pull against a simulated CouchDB, shutting the server down afterwards
    mockContinuousPull(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.COUCHDB);
}
/**
* Do a pull replication
*
* @param shutdownMockWebserver - should this test shutdown the mockwebserver
* when done? if another test wants to pick up
* where this left off, you should pass false.
* @param serverType - should the mock return the Sync Gateway server type in
* the "Server" HTTP Header? this changes the behavior of the
* replicator to use bulk_get and POST requests for _changes feeds.
* @param addAttachments - should the mock sync gateway return docs with attachments?
* @return a map that contains the mockwebserver (key="server") and the mock dispatcher
* (key="dispatcher")
*/
public Map<String, Object> mockSinglePull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType, boolean addAttachments) throws Exception {
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(serverType);
// mock documents to be pulled (docId, revId, sequence)
MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
mockDoc1.setAttachmentName("attachment.png");
MockDocumentGet.MockDocument mockDoc2 = new MockDocumentGet.MockDocument("doc2", "1-563b", 2);
mockDoc2.setJsonMap(MockHelper.generateRandomJsonMap());
mockDoc2.setAttachmentName("attachment2.png");
// checkpoint GET response w/ 404 -- simulates a first-time replication with no saved checkpoint
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// _changes response announcing both docs
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc2));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// doc1 response (individual GET path -- used when the replicator does not use _bulk_get)
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
if (addAttachments) {
mockDocumentGet.addAttachmentFilename(mockDoc1.getAttachmentName());
}
dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// doc2 response
mockDocumentGet = new MockDocumentGet(mockDoc2);
if (addAttachments) {
mockDocumentGet.addAttachmentFilename(mockDoc2.getAttachmentName());
}
dispatcher.enqueueResponse(mockDoc2.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// _bulk_get response (alternative fetch path; assertions below accept either path)
MockDocumentBulkGet mockBulkGet = new MockDocumentBulkGet();
mockBulkGet.addDocument(mockDoc1);
mockBulkGet.addDocument(mockDoc2);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_GET, mockBulkGet);
// (debug leftovers, kept commented out)
/*MockResponse mockResponse = mockBulkGet.generateMockResponse(null);
byte[] body = mockResponse.getBody();
String bodyString = new String(body);
Log.d(TAG, "bodyString: %s", bodyString);*/
// respond to all PUT Checkpoint requests; sticky so repeated saves keep succeeding
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
// delay presumably simulates server latency on checkpoint saves -- TODO confirm
mockCheckpointPut.setDelayMs(500);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// start mock server
server.play();
// run pull replication with a custom header, verified on outgoing requests below
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
Map<String, Object> headers = new HashMap<String, Object>();
headers.put("foo", "bar");
pullReplication.setHeaders(headers);
// capture the checkpoint id now so the lastSequence lookup below uses the same id
String checkpointId = pullReplication.remoteCheckpointDocID();
runReplication(pullReplication);
Log.d(TAG, "pullReplication finished");
// log-only listener; registered after the (one-shot) replication has finished
database.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
List<DocumentChange> changes = event.getChanges();
for (DocumentChange documentChange : changes) {
Log.d(TAG, "doc change callback: %s", documentChange.getDocumentId());
}
}
});
// assert that we now have both docs in local db, with the exact revs and bodies served
assertNotNull(database);
Document doc1 = database.getDocument(mockDoc1.getDocId());
assertNotNull(doc1);
assertNotNull(doc1.getCurrentRevisionId());
assertTrue(doc1.getCurrentRevisionId().equals(mockDoc1.getDocRev()));
assertNotNull(doc1.getProperties());
assertEquals(mockDoc1.getJsonMap(), doc1.getUserProperties());
Document doc2 = database.getDocument(mockDoc2.getDocId());
assertNotNull(doc2);
assertNotNull(doc2.getCurrentRevisionId());
assertNotNull(doc2.getProperties());
assertTrue(doc2.getCurrentRevisionId().equals(mockDoc2.getDocRev()));
assertEquals(mockDoc2.getJsonMap(), doc2.getUserProperties());
// assert that docs have attachments (if applicable)
if (addAttachments) {
attachmentAsserts(mockDoc1.getAttachmentName(), doc1);
attachmentAsserts(mockDoc2.getAttachmentName(), doc2);
}
// make assertions about outgoing requests from replicator -> mock
RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT);
assertNotNull(getCheckpointRequest);
// custom header set above must be forwarded on every request
assertEquals("bar", getCheckpointRequest.getHeader("foo"));
assertTrue(getCheckpointRequest.getMethod().equals("GET"));
assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT));
RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
// Sync Gateway servers get a POST _changes request, CouchDB a GET
if (serverType == MockDispatcher.ServerType.SYNC_GW) {
assertTrue(getChangesFeedRequest.getMethod().equals("POST"));
} else {
assertTrue(getChangesFeedRequest.getMethod().equals("GET"));
}
assertTrue(getChangesFeedRequest.getPath().matches(MockHelper.PATH_REGEX_CHANGES));
// wait until the mock webserver receives a PUT checkpoint request with doc #2's sequence
Log.d(TAG, "waiting for PUT checkpoint %s", mockDoc2.getDocSeq());
List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, mockDoc2.getDocSeq());
validateCheckpointRequestsRevisions(checkpointRequests);
Log.d(TAG, "got PUT checkpoint %s", mockDoc2.getDocSeq());
// assert our local sequence matches what is expected
String lastSequence = database.lastSequenceWithCheckpointId(checkpointId);
assertEquals(Integer.toString(mockDoc2.getDocSeq()), lastSequence);
// assert completed count makes sense
assertEquals(pullReplication.getChangesCount(), pullReplication.getCompletedChangesCount());
// allow for either a single _bulk_get request or individual doc requests.
// if the server is sync gateway, it is allowable for replicator to use _bulk_get
RecordedRequest bulkGetRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_GET);
if (bulkGetRequest != null) {
String bulkGetBody = bulkGetRequest.getUtf8Body();
assertTrue(bulkGetBody.contains(mockDoc1.getDocId()));
assertTrue(bulkGetBody.contains(mockDoc2.getDocId()));
} else {
RecordedRequest doc1Request = dispatcher.takeRequest(mockDoc1.getDocPathRegex());
assertTrue(doc1Request.getMethod().equals("GET"));
assertTrue(doc1Request.getPath().matches(mockDoc1.getDocPathRegex()));
RecordedRequest doc2Request = dispatcher.takeRequest(mockDoc2.getDocPathRegex());
assertTrue(doc2Request.getMethod().equals("GET"));
assertTrue(doc2Request.getPath().matches(mockDoc2.getDocPathRegex()));
}
// Shut down the server. Instances cannot be reused.
if (shutdownMockWebserver) {
server.shutdown();
}
// hand the live server + dispatcher back so chained tests can continue where this left off
Map<String, Object> returnVal = new HashMap<String, Object>();
returnVal.put("server", server);
returnVal.put("dispatcher", dispatcher);
return returnVal;
}
/**
*
* Simulate the following:
*
* - Add a few docs and do a pull replication
* - One doc on sync gateway is now updated
* - Do a second pull replication
* - Assert we get the updated doc and save it locally
*
*/
public Map<String, Object> mockMultiplePull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception {
    String doc1Id = "doc1";
    // create mockwebserver and custom dispatcher
    boolean addAttachments = false;
    // do a first pull replication, keeping the mock server alive for this test
    Map<String, Object> serverAndDispatcher = mockSinglePull(false, serverType, addAttachments);
    MockWebServer server = (MockWebServer) serverAndDispatcher.get("server");
    MockDispatcher dispatcher = (MockDispatcher) serverAndDispatcher.get("dispatcher");
    // clear out any possible residue left from previous test, eg, mock responses queued up
    // as well as any recorded requests that have been logged.
    dispatcher.reset();
    // the first pull ended at sequence 2; the server now has doc1 updated to rev 2 at seq 3
    String doc1Rev = "2-2e38";
    int doc1Seq = 3;
    String checkpointRev = "0-1";
    String checkpointLastSequence = "2";
    // checkpoint GET response w/ seq = 2 (so the replicator resumes from there)
    MockCheckpointGet mockCheckpointGet = new MockCheckpointGet();
    mockCheckpointGet.setOk("true");
    mockCheckpointGet.setRev(checkpointRev);
    mockCheckpointGet.setLastSequence(checkpointLastSequence);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet);
    // _changes response announcing the updated doc1
    MockChangesFeed mockChangesFeed = new MockChangesFeed();
    MockChangesFeed.MockChangedDoc mockChangedDoc1 = new MockChangesFeed.MockChangedDoc()
            .setSeq(doc1Seq)
            .setDocId(doc1Id)
            .setChangedRevIds(Arrays.asList(doc1Rev));
    mockChangesFeed.add(mockChangedDoc1);
    MockResponse fakeChangesResponse = mockChangesFeed.generateMockResponse();
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, fakeChangesResponse);
    // doc1 response with a fresh random body for rev 2
    Map<String, Object> doc1JsonMap = MockHelper.generateRandomJsonMap();
    MockDocumentGet mockDocumentGet = new MockDocumentGet()
            .setDocId(doc1Id)
            .setRev(doc1Rev)
            .setJsonMap(doc1JsonMap);
    String doc1PathRegex = "/db/doc1.*";
    dispatcher.enqueueResponse(doc1PathRegex, mockDocumentGet.generateMockResponse());
    // checkpoint PUT response
    MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
    // BUG FIX: sticky was previously set on mockCheckpointGet (copy/paste slip);
    // it is the PUT responder that must keep answering repeated checkpoint saves.
    mockCheckpointPut.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
    // run pull replication
    Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
    runReplication(pullReplication);
    // assert that the updated doc1 is now in the local db
    assertNotNull(database);
    Document doc1 = database.getDocument(doc1Id);
    assertNotNull(doc1);
    assertNotNull(doc1.getCurrentRevisionId());
    assertTrue(doc1.getCurrentRevisionId().startsWith("2-"));
    assertEquals(doc1JsonMap, doc1.getUserProperties());
    // make assertions about outgoing requests from replicator -> mock
    RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT);
    assertNotNull(getCheckpointRequest);
    assertEquals("GET", getCheckpointRequest.getMethod());
    assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT));
    RecordedRequest getChangesFeedRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
    // Sync Gateway servers get a POST _changes request, CouchDB a GET
    if (serverType == MockDispatcher.ServerType.SYNC_GW) {
        assertEquals("POST", getChangesFeedRequest.getMethod());
    } else {
        assertEquals("GET", getChangesFeedRequest.getMethod());
    }
    assertTrue(getChangesFeedRequest.getPath().matches(MockHelper.PATH_REGEX_CHANGES));
    if (serverType == MockDispatcher.ServerType.SYNC_GW) {
        // POST body must request changes since the checkpointed sequence (2)
        Map<String, Object> jsonMap = Manager.getObjectMapper().readValue(getChangesFeedRequest.getUtf8Body(), Map.class);
        assertTrue(jsonMap.containsKey("since"));
        Integer since = (Integer) jsonMap.get("since");
        assertEquals(2, since.intValue());
    }
    RecordedRequest doc1Request = dispatcher.takeRequest(doc1PathRegex);
    assertEquals("GET", doc1Request.getMethod());
    assertTrue(doc1Request.getPath().matches("/db/doc1\\?rev=2-2e38.*"));
    // wait until the mock webserver receives a PUT checkpoint request with the new sequence
    int expectedLastSequence = doc1Seq;
    List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, expectedLastSequence);
    assertEquals(1, checkpointRequests.size());
    // assert our local sequence matches what is expected
    String lastSequence = database.lastSequenceWithCheckpointId(pullReplication.remoteCheckpointDocID());
    assertEquals(Integer.toString(expectedLastSequence), lastSequence);
    // assert completed count makes sense
    assertEquals(pullReplication.getChangesCount(), pullReplication.getCompletedChangesCount());
    if (shutdownMockWebserver) {
        server.shutdown();
    }
    // hand the live server + dispatcher back so chained tests can continue where this left off
    Map<String, Object> returnVal = new HashMap<String, Object>();
    returnVal.put("server", server);
    returnVal.put("dispatcher", dispatcher);
    return returnVal;
}
/**
 * Continuous pull replication against a preloaded mock CouchDB: pull
 * numMockRemoteDocs docs (served in batches across multiple _changes
 * responses), verify they all arrive locally, then stop cleanly.
 *
 * @param shutdownMockWebserver shut down the mock server when done; pass false
 *                              if a later test wants to reuse it
 * @param serverType            must be COUCHDB (asserted below)
 * @return map with keys "server" (MockWebServer) and "dispatcher" (MockDispatcher)
 */
public Map<String, Object> mockContinuousPull(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception {
assertTrue(serverType == MockDispatcher.ServerType.COUCHDB);
final int numMockRemoteDocs = 20; // must be multiple of 10!
final AtomicInteger numDocsPulledLocally = new AtomicInteger(0);
MockDispatcher dispatcher = new MockDispatcher();
dispatcher.setServerType(serverType);
// docs are spread over 10 _changes responses
int numDocsPerChangesResponse = numMockRemoteDocs / 10;
MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, numMockRemoteDocs, numDocsPerChangesResponse);
server.play();
final CountDownLatch receivedAllDocs = new CountDownLatch(1);
// run pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
final CountDownLatch replicationDoneSignal = new CountDownLatch(1);
pullReplication.addChangeListener(new ReplicationFinishedObserver(replicationDoneSignal));
// count local doc changes; release the latch once every remote doc has arrived
database.addChangeListener(new Database.ChangeListener() {
@Override
public void changed(Database.ChangeEvent event) {
List<DocumentChange> changes = event.getChanges();
for (DocumentChange change : changes) {
numDocsPulledLocally.addAndGet(1);
}
if (numDocsPulledLocally.get() == numMockRemoteDocs) {
receivedAllDocs.countDown();
}
}
});
pullReplication.start();
// wait until we received all mock docs or timeout occurs
boolean success = receivedAllDocs.await(60, TimeUnit.SECONDS);
assertTrue(success);
// make sure all docs in local db
Map<String, Object> allDocs = database.getAllDocs(new QueryOptions());
Integer totalRows = (Integer) allDocs.get("total_rows");
List rows = (List) allDocs.get("rows");
assertEquals(numMockRemoteDocs, totalRows.intValue());
assertEquals(numMockRemoteDocs, rows.size());
// cleanup / shutdown
pullReplication.stop();
success = replicationDoneSignal.await(30, TimeUnit.SECONDS);
assertTrue(success);
// wait until the mock webserver receives a PUT checkpoint request with the last doc's sequence;
// this avoids ugly and confusing exceptions in the logs.
// (sequences presumably run 0..numMockRemoteDocs-1 here -- TODO confirm against MockHelper)
List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, numMockRemoteDocs - 1);
validateCheckpointRequestsRevisions(checkpointRequests);
if (shutdownMockWebserver) {
server.shutdown();
}
// hand the live server + dispatcher back so chained tests can continue where this left off
Map<String, Object> returnVal = new HashMap<String, Object>();
returnVal.put("server", server);
returnVal.put("dispatcher", dispatcher);
return returnVal;
}
/**
 * Pull an updated revision (rev 2) of a locally-created doc whose attachment
 * metadata is included but whose attachment body is NOT re-sent
 * (setIncludeAttachmentPart(false)), then verify the attachment survives.
 *
 * NOTE(review): despite the method name, the assertions check that the
 * attachment is still PRESENT after the pull -- confirm the intended meaning
 * of "deleted" here.
 */
public void testAttachmentsDeletedOnPull() throws Exception {
String doc1Id = "doc1";
int doc1Rev2Generation = 2;
String doc1Rev2Digest = "b";
String doc1Rev2 = String.format("%d-%s", doc1Rev2Generation, doc1Rev2Digest);
int doc1Seq1 = 1;
String doc1AttachName = "attachment.png";
String contentType = "image/png";
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
server.play();
// add some documents - verify it has an attachment
Document doc1 = createDocumentForPushReplication(doc1Id, doc1AttachName, contentType);
String doc1Rev1 = doc1.getCurrentRevisionId();
// drop cached Document objects so the re-fetch below reads from storage
database.clearDocumentCache();
doc1 = database.getDocument(doc1.getId());
assertTrue(doc1.getCurrentRevision().getAttachments().size() > 0);
// checkpoint GET response w/ 404 (no saved checkpoint -- fresh replication)
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// checkpoint PUT response
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// add response to 1st _changes request: same doc, now at rev 2 on the server
final MockDocumentGet.MockDocument mockDocument1 = new MockDocumentGet.MockDocument(
doc1Id, doc1Rev2, doc1Seq1);
Map<String, Object> newProperties = new HashMap<String, Object>(doc1.getProperties());
newProperties.put("_rev", doc1Rev2);
mockDocument1.setJsonMap(newProperties);
mockDocument1.setAttachmentName(doc1AttachName);
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// add sticky _changes response to feed=longpoll that just blocks for 60 seconds to emulate
// server that doesn't have any new changes
MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
mockChangesFeedNoResponse.setDelayMs(60 * 1000);
mockChangesFeedNoResponse.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedNoResponse);
// add response to doc get: attachment metadata is listed, but the attachment
// body itself is NOT included in the multipart response
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDocument1);
mockDocumentGet.addAttachmentFilename(mockDocument1.getAttachmentName());
mockDocumentGet.setIncludeAttachmentPart(false);
// rev history links rev 2 back to the locally-known rev 1
Map<String, Object> revHistory = new HashMap<String, Object>();
revHistory.put("start", doc1Rev2Generation);
List ids = Arrays.asList(
RevisionInternal.digestFromRevID(doc1Rev2),
RevisionInternal.digestFromRevID(doc1Rev1)
);
revHistory.put("ids",ids);
mockDocumentGet.setRevHistoryMap(revHistory);
dispatcher.enqueueResponse(mockDocument1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// create and start pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
pullReplication.setContinuous(true);
pullReplication.start();
// wait for the next PUT checkpoint request/response
waitForPutCheckpointRequestWithSeq(dispatcher, 1);
stopReplication(pullReplication);
// clear doc cache
database.clearDocumentCache();
// make sure doc has attachments
Document doc1Fetched = database.getDocument(doc1.getId());
assertTrue(doc1Fetched.getCurrentRevision().getAttachments().size() > 0);
server.shutdown();
}
/**
* This is essentially a regression test for a deadlock
* that was happening when the LiveQuery#onDatabaseChanged()
* was calling waitForUpdateThread(), but that thread was
* waiting on connection to be released by the thread calling
* waitForUpdateThread(). When the deadlock bug was present,
* this test would trigger the deadlock and never finish.
*
* TODO: sporadic assertion failure when checking rev field of PUT checkpoint requests
*/
public void testPullerWithLiveQuery() throws Throwable {
    // register a view keyed by document id so the live query sees every pulled doc
    View view = database.getView("testPullerWithLiveQueryView");
    view.setMapReduce(new Mapper() {
        @Override
        public void map(Map<String, Object> document, Emitter emitter) {
            if (document.get("_id") != null) {
                emitter.emit(document.get("_id"), null);
            }
        }
    }, null, "1");
    final CountDownLatch countDownLatch = new CountDownLatch(1);
    LiveQuery allDocsLiveQuery = view.createQuery().toLiveQuery();
    allDocsLiveQuery.addChangeListener(new LiveQuery.ChangeListener() {
        @Override
        public void changed(LiveQuery.ChangeEvent event) {
            // (removed an unused local counter that was declared here)
            if (event.getError() != null) {
                throw new RuntimeException(event.getError());
            }
            // mockSinglePull() below serves exactly two documents
            if (event.getRows().getCount() == 2) {
                countDownLatch.countDown();
            }
        }
    });
    // kick off live query
    allDocsLiveQuery.start();
    // do pull replication against mock (2 docs, with attachments)
    mockSinglePull(true, MockDispatcher.ServerType.SYNC_GW, true);
    // make sure we were called back with both docs
    boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
    assertTrue(success);
    // clean up
    allDocsLiveQuery.stop();
}
/**
* Make sure that if a continuous push gets an error
* pushing a doc, it will keep retrying it rather than giving up right away.
*
* @throws Exception
*/
public void failingTestPushRetry() throws Exception {
RemoteRequestRetry.RETRY_DELAY_MS = 5; // speed up test execution
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
// checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
mockCheckpointPut.setDelayMs(500);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// _revs_diff response -- everything missing
MockRevsDiff mockRevsDiff = new MockRevsDiff();
// mockRevsDiff.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
// _bulk_docs response -- 503 errors, sticky, so every push attempt fails
MockResponse mockResponse = new MockResponse().setResponseCode(503);
WrappedSmartMockResponse mockBulkDocs = new WrappedSmartMockResponse(mockResponse, false);
mockBulkDocs.setSticky(true);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
server.play();
// create continuous push replication and wait for it to go idle before adding the doc
Replication replication = database.createPushReplication(server.getUrl("/db"));
replication.setContinuous(true);
CountDownLatch replicationIdle = new CountDownLatch(1);
ReplicationIdleObserver idleObserver = new ReplicationIdleObserver(replicationIdle);
replication.addChangeListener(idleObserver);
replication.start();
// wait until idle
boolean success = replicationIdle.await(30, TimeUnit.SECONDS);
assertTrue(success);
replication.removeChangeListener(idleObserver);
// create a doc in local db -- this wakes the idle replicator and triggers the push
Document doc1 = createDocumentForPushReplication("doc1", null, null);
// we should expect to at least see numAttempts attempts at doing POST to _bulk_docs
int numAttempts = RemoteRequestRetry.MAX_RETRIES;
for (int i=0; i < numAttempts; i++) {
RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
assertNotNull(request);
dispatcher.takeRecordedResponseBlocking(request);
}
// TODO: test fails here, because there's nothing to cause it to retry after the
// TODO: request does it's retry attempt. Eg, continuous replicator needs to keep
// TODO: sending new requests
// but it shouldn't give up there, it should keep retrying, so we should expect to
// see at least one more request (probably lots more, but let's just wait for one)
RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_BULK_DOCS);
assertNotNull(request);
dispatcher.takeRecordedResponseBlocking(request);
stopReplication(replication);
server.shutdown();
}
public void testMockSinglePush() throws Exception {
    // one-shot push against a simulated Sync Gateway; server is shut down when done
    mockSinglePush(/* shutdownMockWebserver */ true, MockDispatcher.ServerType.SYNC_GW);
}
/**
* Do a push replication
*
* - Create docs in local db
* - One with no attachment
* - One with small attachment
* - One with large attachment
*
*/
public Map<String, Object> mockSinglePush(boolean shutdownMockWebserver, MockDispatcher.ServerType serverType) throws Exception {
String doc1Id = "doc1";
String doc2Id = "doc2";
String doc3Id = "doc3";
String doc4Id = "doc4";
String doc2PathRegex = String.format("/db/%s.*", doc2Id);
String doc3PathRegex = String.format("/db/%s.*", doc3Id);
String doc2AttachName = "attachment.png";
String doc3AttachName = "attachment2.png";
String contentType = "image/png";
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(serverType);
server.play();
// add some documents: doc1 plain, doc2/doc3 with attachments, doc4 created then deleted
Document doc1 = createDocumentForPushReplication(doc1Id, null, null);
Document doc2 = createDocumentForPushReplication(doc2Id, doc2AttachName, contentType);
Document doc3 = createDocumentForPushReplication(doc3Id, doc3AttachName, contentType);
Document doc4 = createDocumentForPushReplication(doc4Id, null, null);
doc4.delete();
// checkpoint GET response w/ 404 + respond to all PUT Checkpoint requests
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
mockCheckpointPut.setSticky(true);
mockCheckpointPut.setDelayMs(50);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// _revs_diff response -- everything missing
MockRevsDiff mockRevsDiff = new MockRevsDiff();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
// _bulk_docs response -- everything stored
MockBulkDocs mockBulkDocs = new MockBulkDocs();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
// doc PUT responses for docs with attachments (pushed individually, not via _bulk_docs)
MockDocumentPut mockDoc2Put = new MockDocumentPut()
.setDocId(doc2Id)
.setRev(doc2.getCurrentRevisionId());
dispatcher.enqueueResponse(doc2PathRegex, mockDoc2Put.generateMockResponse());
MockDocumentPut mockDoc3Put = new MockDocumentPut()
.setDocId(doc3Id)
.setRev(doc3.getCurrentRevisionId());
dispatcher.enqueueResponse(doc3PathRegex, mockDoc3Put.generateMockResponse());
// run one-shot push replication
Replication replication = database.createPushReplication(server.getUrl("/db"));
replication.setContinuous(false);
// non-SG servers get a createTarget push (replicator creates the remote db)
if (serverType != MockDispatcher.ServerType.SYNC_GW) {
replication.setCreateTarget(true);
Assert.assertTrue(replication.shouldCreateTarget());
}
runReplication(replication);
// make assertions about outgoing requests from replicator -> mock
RecordedRequest getCheckpointRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHECKPOINT);
assertTrue(getCheckpointRequest.getMethod().equals("GET"));
assertTrue(getCheckpointRequest.getPath().matches(MockHelper.PATH_REGEX_CHECKPOINT));
RecordedRequest revsDiffRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_REVS_DIFF);
assertTrue(revsDiffRequest.getUtf8Body().contains(doc1Id));
// doc1 (no attachment) and doc4's deletion go through _bulk_docs...
RecordedRequest bulkDocsRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
assertTrue(bulkDocsRequest.getUtf8Body().contains(doc1Id));
Map <String, Object> bulkDocsJson = Manager.getObjectMapper().readValue(bulkDocsRequest.getUtf8Body(), Map.class);
Map <String, Object> doc4Map = MockBulkDocs.findDocById(bulkDocsJson, doc4Id);
assertTrue(((Boolean)doc4Map.get("_deleted")).booleanValue() == true);
// ...while attachment-bearing docs are pushed with individual PUTs
assertFalse(bulkDocsRequest.getUtf8Body().contains(doc2Id));
RecordedRequest doc2putRequest = dispatcher.takeRequest(doc2PathRegex);
assertTrue(doc2putRequest.getUtf8Body().contains(doc2Id));
assertFalse(doc2putRequest.getUtf8Body().contains(doc3Id));
RecordedRequest doc3putRequest = dispatcher.takeRequest(doc3PathRegex);
assertTrue(doc3putRequest.getUtf8Body().contains(doc3Id));
assertFalse(doc3putRequest.getUtf8Body().contains(doc2Id));
// wait until the mock webserver receives a PUT checkpoint request
// 4 doc creations + doc4's deletion revision = local sequence 5
int expectedLastSequence = 5;
Log.d(TAG, "waiting for put checkpoint with lastSequence: %d", expectedLastSequence);
List<RecordedRequest> checkpointRequests = waitForPutCheckpointRequestWithSequence(dispatcher, expectedLastSequence);
Log.d(TAG, "done waiting for put checkpoint with lastSequence: %d", expectedLastSequence);
validateCheckpointRequestsRevisions(checkpointRequests);
// assert our local sequence matches what is expected
String lastSequence = database.lastSequenceWithCheckpointId(replication.remoteCheckpointDocID());
assertEquals(Integer.toString(expectedLastSequence), lastSequence);
// assert completed count makes sense
assertEquals(replication.getChangesCount(), replication.getCompletedChangesCount());
// Shut down the server. Instances cannot be reused.
if (shutdownMockWebserver) {
server.shutdown();
}
// hand the live server + dispatcher back so chained tests can continue where this left off
Map<String, Object> returnVal = new HashMap<String, Object>();
returnVal.put("server", server);
returnVal.put("dispatcher", dispatcher);
return returnVal;
}
private void workAroundSaveCheckpointRaceCondition() throws InterruptedException {
    // Give the replicator a brief window to persist its checkpoint to the
    // local db before the test proceeds (known save-checkpoint race).
    TimeUnit.MILLISECONDS.sleep(500);
}
    /**
     * A continuous push replication should transition to IDLE once all pending
     * changes are pushed, and a second continuous push against the same endpoint
     * must compute the same remote checkpoint doc ID and see the saved lastSequence.
     */
    public void testContinuousPushReplicationGoesIdle() throws Exception {
        // make sure we are starting empty
        assertEquals(0, database.getLastSequenceNumber());
        // add one doc so the pusher has something to send
        Map<String,Object> properties1 = new HashMap<String,Object>();
        properties1.put("doc1", "testContinuousPushReplicationGoesIdle");
        final Document doc1 = createDocWithProperties(properties1);
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
        server.play();
        // checkpoint GET response w/ 404. also receives checkpoint PUT's
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // replication to do initial sync up - has to be continuous replication so the checkpoint id
        // matches the next continuous replication we're gonna do later.
        Replication firstPusher = database.createPushReplication(server.getUrl("/db"));
        firstPusher.setContinuous(true);
        final String checkpointId = firstPusher.remoteCheckpointDocID(); // save the checkpoint id for later usage
        // start the continuous replication
        CountDownLatch replicationIdleSignal = new CountDownLatch(1);
        ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
        firstPusher.addChangeListener(replicationIdleObserver);
        firstPusher.start();
        // wait until we get an IDLE event
        boolean successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
        assertTrue(successful);
        stopReplication(firstPusher);
        // wait until replication does PUT checkpoint with lastSequence=1
        int expectedLastSequence = 1;
        waitForPutCheckpointRequestWithSeq(dispatcher, expectedLastSequence);
        // the last sequence should be "1" at this point. we will use this later
        final String lastSequence = database.lastSequenceWithCheckpointId(checkpointId);
        assertEquals("1", lastSequence);
        // start a second continuous replication; it must share the first one's checkpoint id
        Replication secondPusher = database.createPushReplication(server.getUrl("/db"));
        secondPusher.setContinuous(true);
        final String secondPusherCheckpointId = secondPusher.remoteCheckpointDocID();
        assertEquals(checkpointId, secondPusherCheckpointId);
        // remove current handler for the GET/PUT checkpoint request, and
        // install a new handler that returns the lastSequence from previous replication
        dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_CHECKPOINT);
        MockCheckpointGet mockCheckpointGet = new MockCheckpointGet();
        mockCheckpointGet.setLastSequence(lastSequence);
        mockCheckpointGet.setRev("0-2");
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet);
        // start second replication
        replicationIdleSignal = new CountDownLatch(1);
        replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
        secondPusher.addChangeListener(replicationIdleObserver);
        secondPusher.start();
        // wait until we get an IDLE event -- with nothing new to push it should go idle quickly
        successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
        assertTrue(successful);
        stopReplication(secondPusher);
    }
public void testContinuousReplication404Changes() throws Exception {
int previous = PullerInternal.CHANGE_TRACKER_RESTART_DELAY_MS;
PullerInternal.CHANGE_TRACKER_RESTART_DELAY_MS = 5;
try {
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
server.play();
// mock checkpoint GET response w/ 404
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// mock _changes response
for (int i=0; i<100; i++) {
MockResponse mockChangesFeed = new MockResponse();
MockHelper.set404NotFoundJson(mockChangesFeed);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed);
}
// create new replication
int retryDelaySeconds = 1;
Replication pull = database.createPullReplication(server.getUrl("/db"));
pull.setContinuous(true);
// add done listener to replication
CountDownLatch replicationDoneSignal = new CountDownLatch(1);
ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
pull.addChangeListener(replicationFinishedObserver);
// start the replication
pull.start();
// wait until we get a few requests
Log.d(TAG, "Waiting for a _changes request");
RecordedRequest changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
Log.d(TAG, "Got first _changes request, waiting for another _changes request");
changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
Log.d(TAG, "Got second _changes request, waiting for another _changes request");
changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES);
Log.d(TAG, "Got third _changes request, stopping replicator");
// the replication should still be running
assertEquals(1, replicationDoneSignal.getCount());
// cleanup
stopReplication(pull);
server.shutdown();
} finally {
PullerInternal.CHANGE_TRACKER_RESTART_DELAY_MS = previous;
}
}
/**
* Regression test for issue couchbase/couchbase-lite-android#174
*/
    public void testAllLeafRevisionsArePushed() throws Exception {
        // Mock HTTP client: _revs_diff reports everything missing, local checkpoint
        // doc lookups 404, and responses are delayed slightly.
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderRevDiffsAllMissing();
        mockHttpClient.setResponseDelayMilliseconds(250);
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
            @Override
            public HttpClient getHttpClient() {
                return mockHttpClient;
            }
            @Override
            public void addCookies(List<Cookie> cookies) {
            }
            @Override
            public void deleteCookie(String name) {
            }
            @Override
            public CookieStore getCookieStore() {
                return null;
            }
        };
        manager.setDefaultHttpClientFactory(mockHttpClientFactory);
        // Build a conflicted revision tree: branch "a" is deleted at rev4a, while
        // branch "b" (rev2b) stays alive -- two leaf revisions total.
        Document doc = database.createDocument();
        SavedRevision rev1a = doc.createRevision().save();
        SavedRevision rev2a = createRevisionWithRandomProps(rev1a, false);
        SavedRevision rev3a = createRevisionWithRandomProps(rev2a, false);
        // delete the branch we've been using, then create a new one to replace it
        SavedRevision rev4a = rev3a.deleteDocument();
        SavedRevision rev2b = createRevisionWithRandomProps(rev1a, true);
        assertEquals(rev2b.getId(), doc.getCurrentRevisionId());
        // sync with remote DB -- should push both leaf revisions
        Replication push = database.createPushReplication(getReplicationURL());
        runReplication(push);
        assertNull(push.getLastError());
        // find the _revs_diff captured request and decode into json
        boolean foundRevsDiff = false;
        List<HttpRequest> captured = mockHttpClient.getCapturedRequests();
        for (HttpRequest httpRequest : captured) {
            if (httpRequest instanceof HttpPost) {
                HttpPost httpPost = (HttpPost) httpRequest;
                if (httpPost.getURI().toString().endsWith("_revs_diff")) {
                    foundRevsDiff = true;
                    Map<String, Object> jsonMap = CustomizableMockHttpClient.getJsonMapFromRequest(httpPost);
                    // assert that it contains the expected revisions: exactly the
                    // two leaves (deleted rev4a and live rev2b)
                    List<String> revisionIds = (List) jsonMap.get(doc.getId());
                    assertEquals(2, revisionIds.size());
                    assertTrue(revisionIds.contains(rev4a.getId()));
                    assertTrue(revisionIds.contains(rev2b.getId()));
                }
            }
        }
        assertTrue(foundRevsDiff);
    }
/**
* Verify that when a conflict is resolved on (mock) Sync Gateway
* and a pull replication is done, the conflict is resolved locally.
*
* - Create local docs in conflict
* - Simulate sync gw responses that resolve the conflict
* - Do pull replication
* - Assert conflict is resolved locally
*
*/
public void testRemoteConflictResolution() throws Exception {
// Create a document with two conflicting edits.
Document doc = database.createDocument();
SavedRevision rev1 = doc.createRevision().save();
SavedRevision rev2a = createRevisionWithRandomProps(rev1, false);
SavedRevision rev2b = createRevisionWithRandomProps(rev1, true);
// make sure we can query the db to get the conflict
Query allDocsQuery = database.createAllDocumentsQuery();
allDocsQuery.setAllDocsMode(Query.AllDocsMode.ONLY_CONFLICTS);
QueryEnumerator rows = allDocsQuery.run();
boolean foundDoc = false;
assertEquals(1, rows.getCount());
for (Iterator<QueryRow> it = rows; it.hasNext();) {
QueryRow row = it.next();
if (row.getDocument().getId().equals(doc.getId())) {
foundDoc = true;
}
}
assertTrue(foundDoc);
// make sure doc in conflict
assertTrue(doc.getConflictingRevisions().size() > 1);
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
// checkpoint GET response w/ 404
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
int rev3PromotedGeneration = 3;
String rev3PromotedDigest = "d46b";
String rev3Promoted = String.format("%d-%s", rev3PromotedGeneration, rev3PromotedDigest);
int rev3DeletedGeneration = 3;
String rev3DeletedDigest = "e768";
String rev3Deleted = String.format("%d-%s", rev3DeletedGeneration, rev3DeletedDigest);
int seq = 4;
// _changes response
MockChangesFeed mockChangesFeed = new MockChangesFeed();
MockChangesFeed.MockChangedDoc mockChangedDoc = new MockChangesFeed.MockChangedDoc();
mockChangedDoc.setDocId(doc.getId());
mockChangedDoc.setSeq(seq);
mockChangedDoc.setChangedRevIds(Arrays.asList(rev3Promoted, rev3Deleted));
mockChangesFeed.add(mockChangedDoc);
MockResponse response = mockChangesFeed.generateMockResponse();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, response);
// docRev3Promoted response
MockDocumentGet.MockDocument docRev3Promoted = new MockDocumentGet.MockDocument(doc.getId(), rev3Promoted, seq);
docRev3Promoted.setJsonMap(MockHelper.generateRandomJsonMap());
MockDocumentGet mockDocRev3PromotedGet = new MockDocumentGet(docRev3Promoted);
Map<String, Object> rev3PromotedRevHistory = new HashMap<String, Object>();
rev3PromotedRevHistory.put("start", rev3PromotedGeneration);
List ids = Arrays.asList(
rev3PromotedDigest,
RevisionInternal.digestFromRevID(rev2a.getId()),
RevisionInternal.digestFromRevID(rev2b.getId())
);
rev3PromotedRevHistory.put("ids", ids);
mockDocRev3PromotedGet.setRevHistoryMap(rev3PromotedRevHistory);
dispatcher.enqueueResponse(docRev3Promoted.getDocPathRegex(), mockDocRev3PromotedGet.generateMockResponse());
// docRev3Deleted response
MockDocumentGet.MockDocument docRev3Deleted = new MockDocumentGet.MockDocument(doc.getId(), rev3Deleted, seq);
Map<String, Object> jsonMap = MockHelper.generateRandomJsonMap();
jsonMap.put("_deleted", true);
docRev3Deleted.setJsonMap(jsonMap);
MockDocumentGet mockDocRev3DeletedGet = new MockDocumentGet(docRev3Deleted);
Map<String, Object> rev3DeletedRevHistory = new HashMap<String, Object>();
rev3DeletedRevHistory.put("start", rev3DeletedGeneration);
ids = Arrays.asList(
rev3DeletedDigest,
RevisionInternal.digestFromRevID(rev2b.getId()),
RevisionInternal.digestFromRevID(rev1.getId())
);
rev3DeletedRevHistory.put("ids", ids);
mockDocRev3DeletedGet.setRevHistoryMap(rev3DeletedRevHistory);
dispatcher.enqueueResponse(docRev3Deleted.getDocPathRegex(), mockDocRev3DeletedGet.generateMockResponse());
// start mock server
server.play();
// run pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
runReplication(pullReplication);
assertNull(pullReplication.getLastError());
// assertions about outgoing requests
RecordedRequest changesRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES);
assertNotNull(changesRequest);
RecordedRequest docRev3DeletedRequest = dispatcher.takeRequest(docRev3Deleted.getDocPathRegex());
assertNotNull(docRev3DeletedRequest);
RecordedRequest docRev3PromotedRequest = dispatcher.takeRequest(docRev3Promoted.getDocPathRegex());
assertNotNull(docRev3PromotedRequest);
// Make sure the conflict was resolved locally.
assertEquals(1, doc.getConflictingRevisions().size());
}
public void testPushReplicationCanMissDocs() throws Exception {
assertEquals(0, database.getLastSequenceNumber());
Map<String,Object> properties1 = new HashMap<String,Object>();
properties1.put("doc1", "testPushReplicationCanMissDocs");
final Document doc1 = createDocWithProperties(properties1);
Map<String,Object> properties2 = new HashMap<String,Object>();
properties1.put("doc2", "testPushReplicationCanMissDocs");
final Document doc2 = createDocWithProperties(properties2);
UnsavedRevision doc2UnsavedRev = doc2.createRevision();
InputStream attachmentStream = getAsset("attachment.png");
doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
SavedRevision doc2Rev = doc2UnsavedRev.save();
assertNotNull(doc2Rev);
final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
mockHttpClient.addResponderFakeLocalDocumentUpdate404();
mockHttpClient.setResponder("_bulk_docs", new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
String json = "{\"error\":\"not_found\",\"reason\":\"missing\"}";
return CustomizableMockHttpClient.generateHttpResponseObject(404, "NOT FOUND", json);
}
});
mockHttpClient.setResponder(doc2.getId(), new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
Map<String, Object> responseObject = new HashMap<String, Object>();
responseObject.put("id", doc2.getId());
responseObject.put("ok", true);
responseObject.put("rev", doc2.getCurrentRevisionId());
return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
}
});
// create a replication obeserver to wait until replication finishes
CountDownLatch replicationDoneSignal = new CountDownLatch(1);
ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
// create replication and add observer
manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
Replication pusher = database.createPushReplication(getReplicationURL());
pusher.addChangeListener(replicationFinishedObserver);
// save the checkpoint id for later usage
String checkpointId = pusher.remoteCheckpointDocID();
// kick off the replication
pusher.start();
// wait for it to finish
boolean success = replicationDoneSignal.await(60, TimeUnit.SECONDS);
assertTrue(success);
Log.d(TAG, "replicationDoneSignal finished");
// we would expect it to have recorded an error because one of the docs (the one without the attachment)
// will have failed.
assertNotNull(pusher.getLastError());
// workaround for the fact that the replicationDoneSignal.wait() call will unblock before all
// the statements in Replication.stopped() have even had a chance to execute.
// (specifically the ones that come after the call to notifyChangeListeners())
Thread.sleep(500);
String localLastSequence = database.lastSequenceWithCheckpointId(checkpointId);
Log.d(TAG, "database.lastSequenceWithCheckpointId(): " + localLastSequence);
Log.d(TAG, "doc2.getCurrentRevision().getSequence(): " + doc2.getCurrentRevision().getSequence());
String msg = "Since doc1 failed, the database should _not_ have had its lastSequence bumped" +
" to doc2's sequence number. If it did, it's bug: github.com/couchbase/couchbase-lite-java-core/issues/95";
assertFalse(msg, Long.toString(doc2.getCurrentRevision().getSequence()).equals(localLastSequence));
assertNull(localLastSequence);
assertTrue(doc2.getCurrentRevision().getSequence() > 0);
}
public void testPushUpdatedDocWithoutReSendingAttachments() throws Exception {
assertEquals(0, database.getLastSequenceNumber());
Map<String,Object> properties1 = new HashMap<String,Object>();
properties1.put("dynamic", 1);
final Document doc = createDocWithProperties(properties1);
SavedRevision doc1Rev = doc.getCurrentRevision();
// Add attachment to document
UnsavedRevision doc2UnsavedRev = doc.createRevision();
InputStream attachmentStream = getAsset("attachment.png");
doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
SavedRevision doc2Rev = doc2UnsavedRev.save();
assertNotNull(doc2Rev);
final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
mockHttpClient.addResponderFakeLocalDocumentUpdate404();
mockHttpClient.setResponder(doc.getId(), new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
Map<String, Object> responseObject = new HashMap<String, Object>();
responseObject.put("id", doc.getId());
responseObject.put("ok", true);
responseObject.put("rev", doc.getCurrentRevisionId());
return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
}
});
// create replication and add observer
manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
Replication pusher = database.createPushReplication(getReplicationURL());
runReplication(pusher);
List<HttpRequest> captured = mockHttpClient.getCapturedRequests();
for (HttpRequest httpRequest : captured) {
// verify that there are no PUT requests with attachments
if (httpRequest instanceof HttpPut) {
HttpPut httpPut = (HttpPut) httpRequest;
HttpEntity entity=httpPut.getEntity();
//assertFalse("PUT request with updated doc properties contains attachment", entity instanceof MultipartEntity);
}
}
mockHttpClient.clearCapturedRequests();
Document oldDoc =database.getDocument(doc.getId());
UnsavedRevision aUnsavedRev = oldDoc.createRevision();
Map<String,Object> prop = new HashMap<String,Object>();
prop.putAll(oldDoc.getProperties());
prop.put("dynamic", (Integer) oldDoc.getProperty("dynamic") +1);
aUnsavedRev.setProperties(prop);
final SavedRevision savedRev=aUnsavedRev.save();
mockHttpClient.setResponder(doc.getId(), new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
Map<String, Object> responseObject = new HashMap<String, Object>();
responseObject.put("id", doc.getId());
responseObject.put("ok", true);
responseObject.put("rev", savedRev.getId());
return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
}
});
final String json = String.format("{\"%s\":{\"missing\":[\"%s\"],\"possible_ancestors\":[\"%s\",\"%s\"]}}",doc.getId(),savedRev.getId(),doc1Rev.getId(), doc2Rev.getId());
mockHttpClient.setResponder("_revs_diff", new CustomizableMockHttpClient.Responder() {
@Override
public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
return mockHttpClient.generateHttpResponseObject(json);
}
});
pusher = database.createPushReplication(getReplicationURL());
runReplication(pusher);
captured = mockHttpClient.getCapturedRequests();
for (HttpRequest httpRequest : captured) {
// verify that there are no PUT requests with attachments
if (httpRequest instanceof HttpPut) {
HttpPut httpPut = (HttpPut) httpRequest;
HttpEntity entity=httpPut.getEntity();
assertFalse("PUT request with updated doc properties contains attachment", entity instanceof MultipartEntity);
}
}
}
    /**
     * If the server rejects a multipart PUT with HTTP 415 (Unsupported Media Type),
     * the pusher must retry the same document with a non-multipart body. Asserts the
     * first captured PUT is multipart and any subsequent PUT is not.
     */
    public void testServerDoesNotSupportMultipart() throws Exception {
        assertEquals(0, database.getLastSequenceNumber());
        Map<String,Object> properties1 = new HashMap<String,Object>();
        properties1.put("dynamic", 1);
        final Document doc = createDocWithProperties(properties1);
        SavedRevision doc1Rev = doc.getCurrentRevision();
        // Add attachment to document
        UnsavedRevision doc2UnsavedRev = doc.createRevision();
        InputStream attachmentStream = getAsset("attachment.png");
        doc2UnsavedRev.setAttachment("attachment.png", "image/png", attachmentStream);
        SavedRevision doc2Rev = doc2UnsavedRev.save();
        assertNotNull(doc2Rev);
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        // Responder chain for the doc's PUT endpoint: first call 415, second call OK.
        Queue<CustomizableMockHttpClient.Responder> responders = new LinkedList<CustomizableMockHttpClient.Responder>();
        //Reject multipart PUT with response code 415
        responders.add(new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                String json = "{\"error\":\"Unsupported Media Type\",\"reason\":\"missing\"}";
                return CustomizableMockHttpClient.generateHttpResponseObject(415, "Unsupported Media Type", json);
            }
        });
        // second call should be plain json, return good response
        responders.add(new CustomizableMockHttpClient.Responder() {
            @Override
            public HttpResponse execute(HttpUriRequest httpUriRequest) throws IOException {
                Map<String, Object> responseObject = new HashMap<String, Object>();
                responseObject.put("id", doc.getId());
                responseObject.put("ok", true);
                responseObject.put("rev", doc.getCurrentRevisionId());
                return CustomizableMockHttpClient.generateHttpResponseObject(responseObject);
            }
        });
        ResponderChain responderChain = new ResponderChain(responders);
        mockHttpClient.setResponder(doc.getId(), responderChain);
        // create replication and add observer
        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(getReplicationURL());
        runReplication(pusher);
        List<HttpRequest> captured = mockHttpClient.getCapturedRequests();
        int entityIndex =0;
        for (HttpRequest httpRequest : captured) {
            // first PUT is expected to be multipart; the retry after the 415 must not be
            if (httpRequest instanceof HttpPut) {
                HttpPut httpPut = (HttpPut) httpRequest;
                HttpEntity entity=httpPut.getEntity();
                if(entityIndex++ == 0) {
                    assertTrue("PUT request with attachment is not multipart", entity instanceof MultipartEntity);
                } else {
                    assertFalse("PUT request with attachment is multipart", entity instanceof MultipartEntity);
                }
            }
        }
    }
public void testServerIsSyncGatewayVersion() {
Replication pusher = database.createPushReplication(getReplicationURL());
assertFalse(pusher.serverIsSyncGatewayVersion("0.01"));
pusher.setServerType("Couchbase Sync Gateway/0.93");
assertTrue(pusher.serverIsSyncGatewayVersion("0.92"));
assertFalse(pusher.serverIsSyncGatewayVersion("0.94"));
}
public void testDifferentCheckpointsFilteredReplication() throws Exception {
Replication pullerNoFilter = database.createPullReplication(getReplicationURL());
String noFilterCheckpointDocId = pullerNoFilter.remoteCheckpointDocID();
Replication pullerWithFilter1 = database.createPullReplication(getReplicationURL());
pullerWithFilter1.setFilter("foo/bar");
Map<String, Object> filterParams= new HashMap<String, Object>();
filterParams.put("a", "aval");
filterParams.put("b", "bval");
pullerWithFilter1.setDocIds(Arrays.asList("doc3", "doc1", "doc2"));
pullerWithFilter1.setFilterParams(filterParams);
String withFilterCheckpointDocId = pullerWithFilter1.remoteCheckpointDocID();
assertFalse(withFilterCheckpointDocId.equals(noFilterCheckpointDocId));
Replication pullerWithFilter2 = database.createPullReplication(getReplicationURL());
pullerWithFilter2.setFilter("foo/bar");
filterParams= new HashMap<String, Object>();
filterParams.put("b", "bval");
filterParams.put("a", "aval");
pullerWithFilter2.setDocIds(Arrays.asList("doc2", "doc3", "doc1"));
pullerWithFilter2.setFilterParams(filterParams);
String withFilterCheckpointDocId2 = pullerWithFilter2.remoteCheckpointDocID();
assertTrue(withFilterCheckpointDocId.equals(withFilterCheckpointDocId2));
}
    /**
     * setCookie()/deleteCookie() must maintain the replicator's cookie store: the
     * stored cookie carries the expected name/value/domain/path/expiry/secure
     * attributes, and deleting by name removes only that cookie.
     */
    public void testSetReplicationCookie() throws Exception {
        URL replicationUrl = getReplicationURL();
        Replication puller = database.createPullReplication(replicationUrl);
        String cookieName = "foo";
        String cookieVal = "bar";
        boolean isSecure = false;
        boolean httpOnly = false;
        // expiration date - 1 day from now
        Calendar cal = Calendar.getInstance();
        cal.setTime(new Date());
        int numDaysToAdd = 1;
        cal.add(Calendar.DATE, numDaysToAdd);
        Date expirationDate = cal.getTime();
        // set the cookie (empty path -- expected to default to the replication URL's path,
        // per the getPath() assertion below)
        puller.setCookie(cookieName, cookieVal, "", expirationDate, isSecure, httpOnly);
        // make sure it made it into cookie store and has expected params
        CookieStore cookieStore = puller.getClientFactory().getCookieStore();
        List<Cookie> cookies = cookieStore.getCookies();
        assertEquals(1, cookies.size());
        Cookie cookie = cookies.get(0);
        assertEquals(cookieName, cookie.getName());
        assertEquals(cookieVal, cookie.getValue());
        assertEquals(replicationUrl.getHost(), cookie.getDomain());
        assertEquals(replicationUrl.getPath(), cookie.getPath());
        assertEquals(expirationDate, cookie.getExpiryDate());
        assertEquals(isSecure, cookie.isSecure());
        // add a second cookie
        String cookieName2 = "foo2";
        puller.setCookie(cookieName2, cookieVal, "", expirationDate, isSecure, false);
        assertEquals(2, cookieStore.getCookies().size());
        // delete cookie
        puller.deleteCookie(cookieName2);
        // should only have the original cookie left
        assertEquals(1, cookieStore.getCookies().size());
        assertEquals(cookieName, cookieStore.getCookies().get(0).getName());
    }
    /**
     * Pull replication against a feed where one announced doc (doc2) turns out to
     * be missing (404) on the server: the other docs must still be pulled, and the
     * checkpoint must advance to the last successfully pulled sequence.
     */
    public void testChangesFeedWithPurgedDoc() throws Exception {
        //generate documents ids
        String doc1Id = "doc1-" + System.currentTimeMillis();
        String doc2Id = "doc2-" + System.currentTimeMillis();
        String doc3Id = "doc3-" + System.currentTimeMillis();
        //generate mock documents
        final MockDocumentGet.MockDocument mockDocument1 = new MockDocumentGet.MockDocument(
                doc1Id, "1-a", 1);
        mockDocument1.setJsonMap(MockHelper.generateRandomJsonMap());
        final MockDocumentGet.MockDocument mockDocument2 = new MockDocumentGet.MockDocument(
                doc2Id, "1-b", 2);
        mockDocument2.setJsonMap(MockHelper.generateRandomJsonMap());
        final MockDocumentGet.MockDocument mockDocument3 = new MockDocumentGet.MockDocument(
                doc3Id, "1-c", 3);
        mockDocument3.setJsonMap(MockHelper.generateRandomJsonMap());
        // create mockwebserver and custom dispatcher
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = MockHelper.getMockWebServer(dispatcher);
        dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
        //add response to _local request
        // checkpoint GET response w/ 404
        MockResponse fakeCheckpointResponse = new MockResponse();
        MockHelper.set404NotFoundJson(fakeCheckpointResponse);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
        //add response to _changes request
        // _changes response announces all three docs
        MockChangesFeed mockChangesFeed = new MockChangesFeed();
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument1));
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument2));
        mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument3));
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
        // doc1 response
        MockDocumentGet mockDocumentGet1 = new MockDocumentGet(mockDocument1);
        dispatcher.enqueueResponse(mockDocument1.getDocPathRegex(), mockDocumentGet1.generateMockResponse());
        // doc2 missing response (simulates a doc purged on the server)
        MockResponse missingDocumentMockResponse = new MockResponse();
        MockHelper.set404NotFoundJson(missingDocumentMockResponse);
        dispatcher.enqueueResponse(mockDocument2.getDocPathRegex(), missingDocumentMockResponse);
        // doc3 response
        MockDocumentGet mockDocumentGet3 = new MockDocumentGet(mockDocument3);
        dispatcher.enqueueResponse(mockDocument3.getDocPathRegex(), mockDocumentGet3.generateMockResponse());
        // checkpoint PUT response
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // start mock server
        server.play();
        //create url for replication
        URL baseUrl = server.getUrl("/db");
        //create one-shot pull replication
        Replication pullReplication = database.createPullReplication(baseUrl);
        pullReplication.setContinuous(false);
        //add change listener to notify when the replication is finished
        CountDownLatch replicationFinishedContCountDownLatch = new CountDownLatch(1);
        ReplicationFinishedObserver replicationFinishedObserver =
                new ReplicationFinishedObserver(replicationFinishedContCountDownLatch);
        pullReplication.addChangeListener(replicationFinishedObserver);
        //start replication
        pullReplication.start();
        boolean success = replicationFinishedContCountDownLatch.await(100, TimeUnit.SECONDS);
        assertTrue(success);
        if (pullReplication.getLastError() != null) {
            // NOTE(review): this cast assumes the last error is an HttpResponseException -- verify
            Log.d(TAG, "Replication had error: " + ((HttpResponseException) pullReplication.getLastError()).getStatusCode());
        }
        //assert document 1 was correctly pulled
        Document doc1 = database.getDocument(doc1Id);
        assertNotNull(doc1);
        assertNotNull(doc1.getCurrentRevision());
        //assert it was impossible to pull doc2
        Document doc2 = database.getDocument(doc2Id);
        assertNotNull(doc2);
        assertNull(doc2.getCurrentRevision());
        //assert it was possible to pull doc3
        Document doc3 = database.getDocument(doc3Id);
        assertNotNull(doc3);
        assertNotNull(doc3.getCurrentRevision());
        // wait until the replicator PUT's checkpoint with mockDocument3's sequence
        waitForPutCheckpointRequestWithSeq(dispatcher, mockDocument3.getDocSeq());
        //last saved seq must be equal to last pulled document seq
        String doc3Seq = Integer.toString(mockDocument3.getDocSeq());
        String lastSequence = database.lastSequenceWithCheckpointId(pullReplication.remoteCheckpointDocID());
        assertEquals(doc3Seq, lastSequence);
        //stop mock server
        server.shutdown();
    }
    /**
     * Regression test: a revision that is purged locally right after being saved
     * must NOT be pushed by a continuous push replication. Verifies exactly one
     * _bulk_docs request is ever sent (for the original doc) and none for the
     * purged revision.
     */
    public void testPushPurgedDoc() throws Throwable {
        int numBulkDocRequests = 0;
        HttpPost lastBulkDocsRequest = null;
        Map<String,Object> properties = new HashMap<String, Object>();
        properties.put("testName", "testPurgeDocument");
        Document doc = createDocumentWithProperties(database, properties);
        assertNotNull(doc);
        // Mock HTTP client: _revs_diff reports everything missing, local checkpoint
        // doc lookups 404, responses slightly delayed.
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderRevDiffsAllMissing();
        mockHttpClient.setResponseDelayMilliseconds(250);
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
            @Override
            public HttpClient getHttpClient() {
                return mockHttpClient;
            }
            @Override
            public void addCookies(List<Cookie> cookies) {
            }
            @Override
            public void deleteCookie(String name) {
            }
            @Override
            public CookieStore getCookieStore() {
                return null;
            }
        };
        URL remote = getReplicationURL();
        manager.setDefaultHttpClientFactory(mockHttpClientFactory);
        Replication pusher = database.createPushReplication(remote);
        pusher.setContinuous(true);
        // Latch fires once changesCount == completedChangesCount (and non-zero),
        // i.e. the pusher has caught up with all pending changes.
        final CountDownLatch replicationCaughtUpSignal = new CountDownLatch(1);
        pusher.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                final int changesCount = event.getSource().getChangesCount();
                final int completedChangesCount = event.getSource().getCompletedChangesCount();
                String msg = String.format("changes: %d completed changes: %d", changesCount, completedChangesCount);
                Log.d(TAG, msg);
                if (changesCount == completedChangesCount && changesCount != 0) {
                    replicationCaughtUpSignal.countDown();
                }
            }
        });
        pusher.start();
        // wait until that doc is pushed
        boolean didNotTimeOut = replicationCaughtUpSignal.await(60, TimeUnit.SECONDS);
        assertTrue(didNotTimeOut);
        // at this point, we should have captured exactly 1 bulk docs request
        numBulkDocRequests = 0;
        for (HttpRequest capturedRequest : mockHttpClient.getCapturedRequests()) {
            if (capturedRequest instanceof HttpPost && ((HttpPost) capturedRequest).getURI().toString().endsWith("_bulk_docs")) {
                lastBulkDocsRequest = (HttpPost) capturedRequest;
                numBulkDocRequests += 1;
            }
        }
        assertEquals(1, numBulkDocRequests);
        // that bulk docs request should have the "start" key under its _revisions
        Map<String, Object> jsonMap = mockHttpClient.getJsonMapFromRequest((HttpPost) lastBulkDocsRequest);
        List docs = (List) jsonMap.get("docs");
        Map<String, Object> onlyDoc = (Map) docs.get(0);
        Map<String, Object> revisions = (Map) onlyDoc.get("_revisions");
        assertTrue(revisions.containsKey("start"));
        // now add a new revision, which will trigger the pusher to try to push it
        properties = new HashMap<String, Object>();
        properties.put("testName2", "update doc");
        UnsavedRevision unsavedRevision = doc.createRevision();
        unsavedRevision.setUserProperties(properties);
        unsavedRevision.save();
        // but then immediately purge it
        doc.purge();
        // wait for a while to give the replicator a chance to push it
        // (it should not actually push anything)
        Thread.sleep(5*1000);
        // we should not have gotten any more _bulk_docs requests, because
        // the replicator should not have pushed anything else.
        // (in the case of the bug, it was trying to push the purged revision)
        numBulkDocRequests = 0;
        for (HttpRequest capturedRequest : mockHttpClient.getCapturedRequests()) {
            if (capturedRequest instanceof HttpPost && ((HttpPost) capturedRequest).getURI().toString().endsWith("_bulk_docs")) {
                numBulkDocRequests += 1;
            }
        }
        assertEquals(1, numBulkDocRequests);
        stopReplication(pusher);
    }
/**
 * Verifies that the pusher batches outgoing documents: with INBOX_CAPACITY
 * temporarily lowered to 5 and 3x that many local docs created, every
 * _bulk_docs POST captured by the mock HTTP client must carry at most
 * INBOX_CAPACITY docs, and the per-request doc counts must sum to the total.
 */
public void testPusherBatching() throws Throwable {
    // INBOX_CAPACITY is a mutable static; save it so the finally block can restore it.
    int previous = ReplicationInternal.INBOX_CAPACITY;
    ReplicationInternal.INBOX_CAPACITY = 5;
    try {
        // create a bunch local documents
        int numDocsToSend = ReplicationInternal.INBOX_CAPACITY * 3;
        for (int i=0; i < numDocsToSend; i++) {
            Map<String,Object> properties = new HashMap<String, Object>();
            properties.put("testPusherBatching", i);
            createDocumentWithProperties(database, properties);
        }
        // kick off a one time push replication to a mock
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        HttpClientFactory mockHttpClientFactory = mockFactoryFactory(mockHttpClient);
        URL remote = getReplicationURL();
        manager.setDefaultHttpClientFactory(mockHttpClientFactory);
        Replication pusher = database.createPushReplication(remote);
        runReplication(pusher);
        assertNull(pusher.getLastError());
        int numDocsSent = 0;
        // verify that only INBOX_SIZE documents are included in any given bulk post request
        List<HttpRequest> capturedRequests = mockHttpClient.getCapturedRequests();
        for (HttpRequest capturedRequest : capturedRequests) {
            if (capturedRequest instanceof HttpPost) {
                HttpPost capturedPostRequest = (HttpPost) capturedRequest;
                if (capturedPostRequest.getURI().getPath().endsWith("_bulk_docs")) {
                    ArrayList docs = CustomizableMockHttpClient.extractDocsFromBulkDocsPost(capturedRequest);
                    String msg = "# of bulk docs pushed should be <= INBOX_CAPACITY";
                    assertTrue(msg, docs.size() <= ReplicationInternal.INBOX_CAPACITY);
                    numDocsSent += docs.size();
                }
            }
        }
        // every local doc must have been pushed exactly once across all batches
        assertEquals(numDocsToSend, numDocsSent);
    } finally {
        ReplicationInternal.INBOX_CAPACITY = previous;
    }
}
/**
 * Disabled (the "failing" prefix keeps it out of the normal run): pull a doc
 * carrying a gzipped attachment and verify the received bytes match the
 * original asset. Body is commented out pending a rewrite against MockWebServer.
 */
public void failingTestPullerGzipped() throws Throwable {
    // TODO: rewrite w/ MockWebserver
    /*String docIdTimestamp = Long.toString(System.currentTimeMillis());
    final String doc1Id = String.format("doc1-%s", docIdTimestamp);
    String attachmentName = "attachment.png";
    addDocWithId(doc1Id, attachmentName, true);
    doPullReplication();
    Log.d(TAG, "Fetching doc1 via id: " + doc1Id);
    Document doc1 = database.getDocument(doc1Id);
    assertNotNull(doc1);
    assertTrue(doc1.getCurrentRevisionId().startsWith("1-"));
    assertEquals(1, doc1.getProperties().get("foo"));
    Attachment attachment = doc1.getCurrentRevision().getAttachment(attachmentName);
    assertTrue(attachment.getLength() > 0);
    assertTrue(attachment.getGZipped());
    InputStream is = attachment.getContent();
    byte[] receivedBytes = TextUtils.read(is);
    is.close();
    InputStream attachmentStream = getAsset(attachmentName);
    byte[] actualBytes = TextUtils.read(attachmentStream);
    Assert.assertEquals(actualBytes.length, receivedBytes.length);
    Assert.assertEquals(actualBytes, receivedBytes);*/
}
/**
 * Verify that validation blocks are called correctly for docs
 * pulled from the sync gateway.
 *
 * - Add doc to (mock) sync gateway
 * - Add validation function that will reject that doc
 * - Do a pull replication
 * - Assert that the doc does _not_ make it into the db
 *
 */
public void testValidationBlockCalled() throws Throwable {
    final MockDocumentGet.MockDocument mockDocument = new MockDocumentGet.MockDocument("doc1", "1-3e28", 1);
    mockDocument.setJsonMap(MockHelper.generateRandomJsonMap());
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    // checkpoint GET response w/ 404 (no prior checkpoint, so the pull starts from scratch)
    MockResponse fakeCheckpointResponse = new MockResponse();
    MockHelper.set404NotFoundJson(fakeCheckpointResponse);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
    // _changes response advertising the single mock doc
    MockChangesFeed mockChangesFeed = new MockChangesFeed();
    mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDocument));
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
    // doc response
    MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDocument);
    dispatcher.enqueueResponse(mockDocument.getDocPathRegex(), mockDocumentGet.generateMockResponse());
    // checkpoint PUT response
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, new MockCheckpointPut());
    // start mock server
    server.play();
    // Add Validation block that rejects exactly the mocked doc
    database.setValidation("testValidationBlockCalled", new Validator() {
        @Override
        public void validate(Revision newRevision, ValidationContext context) {
            if (newRevision.getDocument().getId().equals(mockDocument.getDocId())) {
                context.reject("Reject");
            }
        }
    });
    // run pull replication
    Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
    runReplication(pullReplication);
    // the checkpoint still advances past the rejected doc's sequence
    waitForPutCheckpointRequestWithSeq(dispatcher, mockDocument.getDocSeq());
    // assert doc is not in local db
    Document doc = database.getDocument(mockDocument.getDocId());
    assertNull(doc.getCurrentRevision()); // doc should have been rejected by validation, and therefore not present
    server.shutdown();
}
/**
 * Continuous pull against a preloaded mock CouchDB: pull all docs, wait for
 * the replication to go IDLE, call restart(), and verify it reaches IDLE again.
 */
public void testMockPullerRestart() throws Exception {
    final int numMockRemoteDocs = 20; // must be multiple of 10!
    final AtomicInteger numDocsPulledLocally = new AtomicInteger(0);
    MockDispatcher dispatcher = new MockDispatcher();
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    int numDocsPerChangesResponse = numMockRemoteDocs / 10;
    MockWebServer server = MockHelper.getPreloadedPullTargetMockCouchDB(dispatcher, numMockRemoteDocs, numDocsPerChangesResponse);
    server.play();
    final CountDownLatch receivedAllDocs = new CountDownLatch(1);
    // run pull replication
    Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
    pullReplication.setContinuous(true);
    // expect one IDLE transition before restart() (latch count 1) and
    // two in total after restart() (latch count 2)
    final CountDownLatch replicationIdleFirstTime = new CountDownLatch(1);
    final CountDownLatch replicationIdleSecondTime = new CountDownLatch(2);
    pullReplication.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            if (event.getTransition() != null && event.getTransition().getDestination() == ReplicationState.IDLE) {
                replicationIdleFirstTime.countDown();
                replicationIdleSecondTime.countDown();
            }
        }
    });
    // count docs as they land in the local db; release the latch once all arrived
    database.addChangeListener(new Database.ChangeListener() {
        @Override
        public void changed(Database.ChangeEvent event) {
            List<DocumentChange> changes = event.getChanges();
            for (DocumentChange change : changes) {
                numDocsPulledLocally.addAndGet(1);
            }
            if (numDocsPulledLocally.get() == numMockRemoteDocs) {
                receivedAllDocs.countDown();
            }
        }
    });
    pullReplication.start();
    // wait until we received all mock docs or timeout occurs
    boolean success = receivedAllDocs.await(60, TimeUnit.SECONDS);
    assertTrue(success);
    // wait until replication goes idle
    success = replicationIdleFirstTime.await(60, TimeUnit.SECONDS);
    assertTrue(success);
    pullReplication.restart();
    // wait until replication goes idle again
    success = replicationIdleSecondTime.await(60, TimeUnit.SECONDS);
    assertTrue(success);
    stopReplication(pullReplication);
    // cleanup / shutdown
    server.shutdown();
}
/**
 * One-shot push against an HTTP client that fails every request with a 406:
 * the replication must finish with zero change counts and a non-null lastError.
 */
public void testRunReplicationWithError() throws Exception {
    HttpClientFactory mockHttpClientFactory = new HttpClientFactory() {
        @Override
        public HttpClient getHttpClient() {
            CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
            int statusCode = 406;
            mockHttpClient.addResponderFailAllRequests(statusCode);
            return mockHttpClient;
        }
        @Override
        public void addCookies(List<Cookie> cookies) {
        }
        @Override
        public void deleteCookie(String name) {
        }
        @Override
        public CookieStore getCookieStore() {
            return null;
        }
    };
    manager.setDefaultHttpClientFactory(mockHttpClientFactory);
    Replication r1 = database.createPushReplication(getReplicationURL());
    Assert.assertFalse(r1.isContinuous());
    runReplication(r1);
    // It should have failed with the mocked 406 status:
    Assert.assertEquals(0, r1.getCompletedChangesCount());
    Assert.assertEquals(0, r1.getChangesCount());
    Assert.assertNotNull(r1.getLastError());
}
/** buildRelativeURLString should append a bare path segment to the remote db URL. */
public void testBuildRelativeURLString() throws Exception {
    URL dbUrl = new URL("http://10.0.0.3:4984/todos/");
    Replication replication = database.createPullReplication(dbUrl);
    Assert.assertEquals("http://10.0.0.3:4984/todos/foo",
            replication.buildRelativeURLString("foo"));
}
/** A leading slash on the relative path must not produce a double slash in the result. */
public void testBuildRelativeURLStringWithLeadingSlash() throws Exception {
    URL dbUrl = new URL("http://10.0.0.3:4984/todos/");
    Replication replication = database.createPullReplication(dbUrl);
    Assert.assertEquals("http://10.0.0.3:4984/todos/foo",
            replication.buildRelativeURLString("/foo"));
}
/** setChannels/getChannels round-trip; setting null clears the channel list. */
public void testChannels() throws Exception {
    URL remoteUrl = getReplicationURL();
    Replication puller = database.createPullReplication(remoteUrl);
    List<String> requestedChannels = new ArrayList<String>();
    requestedChannels.add("chan1");
    requestedChannels.add("chan2");
    puller.setChannels(requestedChannels);
    Assert.assertEquals(requestedChannels, puller.getChannels());
    puller.setChannels(null);
    Assert.assertTrue(puller.getChannels().isEmpty());
}
/**
 * Exercises the interaction between setChannels and setFilter/setFilterParams:
 * setting channels installs the implicit "sync_gateway/bychannel" filter with a
 * "channels" parameter; clearing channels (null) also clears filter and params.
 */
public void testChannelsMore() throws MalformedURLException, CouchbaseLiteException {
    Database db = startDatabase();
    URL fakeRemoteURL = new URL("http://couchbase.com/no_such_db");
    Replication r1 = db.createPullReplication(fakeRemoteURL);
    assertTrue(r1.getChannels().isEmpty());
    r1.setFilter("foo/bar");
    assertTrue(r1.getChannels().isEmpty());
    Map<String, Object> filterParams = new HashMap<String, Object>();
    filterParams.put("a", "b");
    r1.setFilterParams(filterParams);
    assertTrue(r1.getChannels().isEmpty());
    // clearing channels when none were set leaves an explicit filter untouched
    r1.setChannels(null);
    assertEquals("foo/bar", r1.getFilter());
    assertEquals(filterParams, r1.getFilterParams());
    List<String> channels = new ArrayList<String>();
    channels.add("NBC");
    channels.add("MTV");
    r1.setChannels(channels);
    assertEquals(channels, r1.getChannels());
    // setting channels replaces the filter with the implicit by-channel filter
    assertEquals("sync_gateway/bychannel", r1.getFilter());
    filterParams = new HashMap<String, Object>();
    filterParams.put("channels", "NBC,MTV");
    assertEquals(filterParams, r1.getFilterParams());
    // clearing channels now also clears the implicit filter and its params
    r1.setChannels(null);
    assertNull(r1.getFilter());       // fixed: was assertEquals(actual, null) with swapped args
    assertNull(r1.getFilterParams());
}
/** A 503 is transient: the pusher should retry and ultimately succeed (no replicator error). */
public void testPushReplicationRecoverableError() throws Exception {
    runPushReplicationWithTransientError(503, "Transient Error", false);
}
/** statusCode -1 makes the mock throw an IOException, which must also be treated as transient. */
public void testPushReplicationRecoverableIOException() throws Exception {
    runPushReplicationWithTransientError(-1 /* throw IOException */, null, false);
}
/** A 404 is permanent: the replication must end with a replicator error. */
public void testPushReplicationNonRecoverableError() throws Exception {
    runPushReplicationWithTransientError(404, "NOT FOUND", true);
}
/**
 * Drives a one-shot push where the first _bulk_docs response is the given
 * error and every later request succeeds. When {@code expectReplicatorError}
 * is false the error must be treated as transient (replication succeeds and a
 * checkpoint is saved); when true the replication must fail and no local
 * checkpoint may be recorded.
 *
 * @param statusCode HTTP status for the first _bulk_docs response; -1 makes the mock throw an IOException instead
 * @param statusMsg  HTTP status message accompanying {@code statusCode}
 * @param expectReplicatorError whether the error should be terminal
 */
public void runPushReplicationWithTransientError(int statusCode, String statusMsg, boolean expectReplicatorError) throws Exception {
    // shrink the retry delay so retries complete within the test timeout; restore in finally
    int previous = RemoteRequestRetry.RETRY_DELAY_MS;
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        Map<String,Object> properties1 = new HashMap<String,Object>();
        properties1.put("doc1", "testPushReplicationTransientError");
        createDocWithProperties(properties1);
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderFakeLocalDocumentUpdate404();
        // first responder returns the (possibly transient) error; afterwards the
        // chain falls through to a permanently-succeeding sentinel responder
        CustomizableMockHttpClient.Responder sentinel = CustomizableMockHttpClient.fakeBulkDocsResponder();
        Queue<CustomizableMockHttpClient.Responder> responders = new LinkedList<CustomizableMockHttpClient.Responder>();
        responders.add(CustomizableMockHttpClient.transientErrorResponder(statusCode, statusMsg));
        ResponderChain responderChain = new ResponderChain(responders, sentinel);
        mockHttpClient.setResponder("_bulk_docs", responderChain);
        // create a replication observer to wait until replication finishes
        CountDownLatch replicationDoneSignal = new CountDownLatch(1);
        ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
        // create replication and add observer
        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(getReplicationURL());
        pusher.addChangeListener(replicationFinishedObserver);
        // save the checkpoint id for later usage
        String checkpointId = pusher.remoteCheckpointDocID();
        // kick off the replication
        pusher.start();
        // wait for it to finish
        boolean success = replicationDoneSignal.await(60, TimeUnit.SECONDS);
        assertTrue(success);
        Log.d(TAG, "replicationDoneSignal finished");
        if (expectReplicatorError) {
            assertNotNull(pusher.getLastError());
        } else {
            assertNull(pusher.getLastError());
        }
        // workaround for the fact that the replicationDoneSignal.wait() call will unblock before all
        // the statements in Replication.stopped() have even had a chance to execute.
        // (specifically the ones that come after the call to notifyChangeListeners())
        Log.d(TAG, "sleeping 1 second");
        Thread.sleep(1000);
        Log.d(TAG, "done sleeping");
        // a local checkpoint only exists when the push ultimately succeeded
        String localLastSequence = database.lastSequenceWithCheckpointId(checkpointId);
        Log.d(TAG, "localLastSequence: %s", localLastSequence);
        if (expectReplicatorError) {
            assertNull(localLastSequence);
        } else {
            assertNotNull(localLastSequence);
        }
    } finally {
        RemoteRequestRetry.RETRY_DELAY_MS = previous;
    }
}
/**
 * Verify that a one-shot push replication still completes (with a non-null
 * lastError) when the mock HTTP client throws an IOException on every request.
 */
public void testOneShotReplicationErrorNotification() throws Throwable {
    // shorten the retry delay so the retries finish quickly; restore afterwards
    int savedRetryDelay = RemoteRequestRetry.RETRY_DELAY_MS;
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        final CustomizableMockHttpClient failingClient = new CustomizableMockHttpClient();
        failingClient.addResponderThrowExceptionAllRequests();
        manager.setDefaultHttpClientFactory(mockFactoryFactory(failingClient));
        URL remoteUrl = getReplicationURL();
        Replication push = database.createPushReplication(remoteUrl);
        runReplication(push);
        assertTrue(push.getLastError() != null);
    } finally {
        RemoteRequestRetry.RETRY_DELAY_MS = savedRetryDelay;
    }
}
/**
 * Verify that running a continuous push replication will emit a change while
 * in an error state when run against a mock server that returns 500 Internal Server
 * errors on every request.
 */
public void testContinuousReplicationErrorNotification() throws Throwable {
    // shorten the retry delay so the error surfaces quickly; restore in finally
    int previous = RemoteRequestRetry.RETRY_DELAY_MS;
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        final CustomizableMockHttpClient mockHttpClient = new CustomizableMockHttpClient();
        mockHttpClient.addResponderThrowExceptionAllRequests();
        URL remote = getReplicationURL();
        manager.setDefaultHttpClientFactory(mockFactoryFactory(mockHttpClient));
        Replication pusher = database.createPushReplication(remote);
        pusher.setContinuous(true);
        // add replication observer; released on the first change event carrying an error
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        pusher.addChangeListener(new Replication.ChangeListener() {
            @Override
            public void changed(Replication.ChangeEvent event) {
                if (event.getError() != null) {
                    countDownLatch.countDown();
                }
            }
        });
        // start replication
        pusher.start();
        boolean success = countDownLatch.await(30, TimeUnit.SECONDS);
        assertTrue(success);
        stopReplication(pusher);
    } finally {
        RemoteRequestRetry.RETRY_DELAY_MS = previous;
    }
}
/**
 * Test for the goOffline() method: a continuous pull receives doc1, goes
 * offline (doc2 not yet served), doc2 is then queued on the _changes feed,
 * and after goOnline() the replication must pick it up and checkpoint it.
 */
public void testGoOffline() throws Exception {
    final int numMockDocsToServe = 2;
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    server.play();
    // mock documents to be pulled
    MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
    mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
    mockDoc1.setAttachmentName("attachment.png");
    MockDocumentGet.MockDocument mockDoc2 = new MockDocumentGet.MockDocument("doc2", "1-563b", 2);
    mockDoc2.setJsonMap(MockHelper.generateRandomJsonMap());
    mockDoc2.setAttachmentName("attachment2.png");
    // fake checkpoint PUT and GET response w/ 404
    MockCheckpointPut fakeCheckpointResponse = new MockCheckpointPut();
    fakeCheckpointResponse.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
    // _changes response with docs (only doc1 is announced before going offline)
    MockChangesFeed mockChangesFeed = new MockChangesFeed();
    mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
    // next _changes response will block (eg, longpoll reponse with no changes to return)
    MockChangesFeed mockChangesFeedEmpty = new MockChangesFeed();
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeedEmpty.generateMockResponse());
    // doc1 response
    MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
    dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
    // doc2 response
    mockDocumentGet = new MockDocumentGet(mockDoc2);
    dispatcher.enqueueResponse(mockDoc2.getDocPathRegex(), mockDocumentGet.generateMockResponse());
    // create replication
    Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
    pullReplication.setContinuous(true);
    // add a change listener tracking both IDLE transitions and completed-change progress
    final CountDownLatch idleCountdownLatch = new CountDownLatch(1);
    final CountDownLatch receivedAllDocs = new CountDownLatch(1);
    pullReplication.addChangeListener(new Replication.ChangeListener() {
        @Override
        public void changed(Replication.ChangeEvent event) {
            if (event.getTransition() != null && event.getTransition().getDestination() == ReplicationState.IDLE) {
                idleCountdownLatch.countDown();
            }
            // NOTE(review): getCompletedChangeCount() (singular) — other listeners in this
            // file use getCompletedChangesCount(); confirm both accessors exist on ChangeEvent.
            if (event.getCompletedChangeCount() == numMockDocsToServe) {
                receivedAllDocs.countDown();
            }
        }
    });
    // start replication
    pullReplication.start();
    // wait until it goes into idle state
    boolean success = idleCountdownLatch.await(120, TimeUnit.SECONDS);
    assertTrue(success);
    // put the replication offline
    putReplicationOffline(pullReplication);
    // at this point, we shouldn't have received all of the docs yet.
    assertTrue(receivedAllDocs.getCount() > 0);
    // return some more docs on _changes feed
    MockChangesFeed mockChangesFeed2 = new MockChangesFeed();
    mockChangesFeed2.add(new MockChangesFeed.MockChangedDoc(mockDoc2));
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed2.generateMockResponse());
    // put the replication online (should see the new docs)
    putReplicationOnline(pullReplication);
    // wait until we receive all the docs
    success = receivedAllDocs.await(120, TimeUnit.SECONDS);
    assertTrue(success);
    // wait until we try to PUT a checkpoint request with doc2's sequence
    waitForPutCheckpointRequestWithSeq(dispatcher, mockDoc2.getDocSeq());
    // make sure all docs in local db
    Map<String, Object> allDocs = database.getAllDocs(new QueryOptions());
    Integer totalRows = (Integer) allDocs.get("total_rows");
    List rows = (List) allDocs.get("rows");
    assertEquals(numMockDocsToServe, totalRows.intValue());
    assertEquals(numMockDocsToServe, rows.size());
    // cleanup
    stopReplication(pullReplication);
    server.shutdown();
}
/**
 * Helper: calls goOffline() on the given (already started) replication and
 * blocks up to 30s until a change event reports the offline transition; the
 * temporary listener is removed afterwards. Fails the test on timeout.
 */
private void putReplicationOffline(Replication replication) throws InterruptedException {
    final CountDownLatch wentOffline = new CountDownLatch(1);
    Replication.ChangeListener changeListener = new ReplicationOfflineObserver(wentOffline);
    replication.addChangeListener(changeListener);
    replication.goOffline();
    boolean succeeded = wentOffline.await(30, TimeUnit.SECONDS);
    assertTrue(succeeded);
    replication.removeChangeListener(changeListener);
}
/**
 * Helper: calls goOnline() on the given replication and blocks up to 30s
 * until a change event reports the replication is active again; the temporary
 * listener is removed afterwards. Fails the test on timeout.
 */
private void putReplicationOnline(Replication replication) throws InterruptedException {
    final CountDownLatch wentOnline = new CountDownLatch(1);
    Replication.ChangeListener changeListener = new ReplicationActiveObserver(wentOnline);
    replication.addChangeListener(changeListener);
    replication.goOnline();
    boolean succeeded = wentOnline.await(30, TimeUnit.SECONDS);
    assertTrue(succeeded);
    replication.removeChangeListener(changeListener);
}
/**
 * Regression test: calling goOnline() repeatedly must not spawn extra change
 * trackers. After 10 goOnline() calls, exactly one longpoll _changes request
 * may reach the mock server.
 */
public void testReplicationOnlineExtraneousChangeTrackers() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.COUCHDB);
    // add sticky checkpoint GET response w/ 404
    MockCheckpointGet fakeCheckpointResponse = new MockCheckpointGet();
    fakeCheckpointResponse.set404(true);
    fakeCheckpointResponse.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
    // add sticky _changes response to feed=longpoll that just blocks for 60 seconds to emulate
    // server that doesn't have any new changes
    MockChangesFeedNoResponse mockChangesFeedNoResponse = new MockChangesFeedNoResponse();
    mockChangesFeedNoResponse.setDelayMs(60 * 1000);
    mockChangesFeedNoResponse.setSticky(true);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES_LONGPOLL, mockChangesFeedNoResponse);
    // add _changes response to feed=normal that returns empty _changes feed immediately
    MockChangesFeed mockChangesFeed = new MockChangesFeed();
    MockResponse mockResponse = mockChangesFeed.generateMockResponse();
    for (int i=0; i<500; i++) { // TODO: use setSticky instead of workaround to add a ton of mock responses
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES_NORMAL, new WrappedSmartMockResponse(mockResponse));
    }
    // start mock server
    server.play();
    //create url for replication
    URL baseUrl = server.getUrl("/db");
    //create replication
    final Replication pullReplication = database.createPullReplication(baseUrl);
    pullReplication.setContinuous(true);
    pullReplication.start();
    // wait until we get a request to the _changes feed
    RecordedRequest changesReq = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHANGES_LONGPOLL);
    assertNotNull(changesReq);
    putReplicationOffline(pullReplication);
    // at this point since we called takeRequest earlier, our recorded _changes request queue should be empty
    assertNull(dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES_LONGPOLL));
    // put replication online 10 times
    for (int i = 0; i < 10; i++) {
        pullReplication.goOnline();
    }
    // sleep for a while to give things a chance to start
    Log.d(TAG, "sleeping for 2 seconds");
    Thread.sleep(2 * 1000);
    Log.d(TAG, "done sleeping");
    // how many _changes feed requests has the replicator made since going online?
    int numChangesRequests = 0;
    while ((changesReq = dispatcher.takeRequest(MockHelper.PATH_REGEX_CHANGES_LONGPOLL)) != null) {
        Log.d(TAG, "changesReq: %s", changesReq);
        numChangesRequests += 1;
    }
    // assert that there was only one _changes feed request
    assertEquals(1, numChangesRequests);
    // shutdown
    stopReplication(pullReplication);
    server.shutdown();
}
/**
 * Test goOffline() method in the context of a continuous pusher.
 *
 * - Kick off continuous push replication
 * - Add a local document
 * - Wait for document to be pushed
 * - Call goOffline()
 * - Add a 2nd local document
 * - Call goOnline()
 * - Wait for 2nd document to be pushed
 *
 * @throws Exception
 */
public void testGoOfflinePusher() throws Exception {
    // shrink the retry delay so offline retries are fast; restore in finally
    int previous = RemoteRequestRetry.RETRY_DELAY_MS;
    RemoteRequestRetry.RETRY_DELAY_MS = 5;
    try {
        // create local docs
        Map<String,Object> properties = new HashMap<String, Object>();
        properties.put("testGoOfflinePusher", "1");
        Document doc1 = createDocumentWithProperties(database, properties);
        // create mock server
        MockDispatcher dispatcher = new MockDispatcher();
        MockWebServer server = new MockWebServer();
        server.setDispatcher(dispatcher);
        server.play();
        // checkpoint PUT or GET response (sticky)
        MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
        mockCheckpointPut.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
        // _revs_diff response -- everything missing
        MockRevsDiff mockRevsDiff = new MockRevsDiff();
        mockRevsDiff.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // _bulk_docs response -- everything stored
        MockBulkDocs mockBulkDocs = new MockBulkDocs();
        mockBulkDocs.setSticky(true);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_BULK_DOCS, mockBulkDocs);
        // create and start push replication
        Replication replicator = database.createPushReplication(server.getUrl("/db"));
        replicator.setContinuous(true);
        CountDownLatch replicationIdleSignal = new CountDownLatch(1);
        ReplicationIdleObserver replicationIdleObserver = new ReplicationIdleObserver(replicationIdleSignal);
        replicator.addChangeListener(replicationIdleObserver);
        replicator.start();
        // wait until replication goes idle
        boolean successful = replicationIdleSignal.await(30, TimeUnit.SECONDS);
        assertTrue(successful);
        // wait until mock server gets the checkpoint PUT request for doc1's sequence
        boolean foundCheckpointPut = false;
        String expectedLastSequence = "1";
        while (!foundCheckpointPut) {
            RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
            if (request.getMethod().equals("PUT")) {
                foundCheckpointPut = true;
                Assert.assertTrue(request.getUtf8Body().indexOf(expectedLastSequence) != -1);
                // wait until mock server responds to the checkpoint PUT request
                dispatcher.takeRecordedResponseBlocking(request);
            }
        }
        putReplicationOffline(replicator);
        // during this time, any requests to server will fail, because we
        // are simulating being offline. (whether or not the pusher should
        // even be _sending_ requests during this time is a different story)
        dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_REVS_DIFF);
        dispatcher.clearRecordedRequests(MockHelper.PATH_REGEX_REVS_DIFF);
        // sticky responder that fails every _revs_diff with a 500 while "offline"
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, new SmartMockResponse() {
            @Override
            public MockResponse generateMockResponse(RecordedRequest request) {
                return new MockResponse().setResponseCode(500);
            }
            @Override
            public boolean isSticky() {
                return true;
            }
            @Override
            public long delayMs() {
                return 0;
            }
        });
        // add a 2nd doc to local db
        properties = new HashMap<String, Object>();
        properties.put("testGoOfflinePusher", "2");
        Document doc2 = createDocumentWithProperties(database, properties);
        // currently, even when offline, adding a new doc will cause it to try pushing the
        // doc. (this is questionable behavior, need to check against iOS). It will retry
        // twice, so lets wait for two requests to /_revs_diff
        RecordedRequest revsDiffRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_REVS_DIFF);
        dispatcher.takeRecordedResponseBlocking(revsDiffRequest);
        revsDiffRequest = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_REVS_DIFF);
        dispatcher.takeRecordedResponseBlocking(revsDiffRequest);
        putReplicationOnline(replicator);
        // we are going online again, so the mockwebserver should accept _revs_diff responses again
        dispatcher.clearQueuedResponse(MockHelper.PATH_REGEX_REVS_DIFF);
        dispatcher.clearRecordedRequests(MockHelper.PATH_REGEX_REVS_DIFF);
        dispatcher.enqueueResponse(MockHelper.PATH_REGEX_REVS_DIFF, mockRevsDiff);
        // wait until mock server gets the 2nd checkpoint PUT request
        foundCheckpointPut = false;
        expectedLastSequence = "2";
        while (!foundCheckpointPut) {
            RecordedRequest request = dispatcher.takeRequestBlocking(MockHelper.PATH_REGEX_CHECKPOINT);
            if (request.getMethod().equals("PUT")) {
                foundCheckpointPut = true;
                Assert.assertTrue(request.getUtf8Body().indexOf(expectedLastSequence) != -1);
                // wait until mock server responds to the checkpoint PUT request
                dispatcher.takeRecordedResponseBlocking(request);
            }
        }
        // make some assertions about the outgoing _bulk_docs requests: one per doc, in order
        RecordedRequest bulkDocsRequest1 = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
        assertNotNull(bulkDocsRequest1);
        assertBulkDocJsonContainsDoc(bulkDocsRequest1, doc1);
        RecordedRequest bulkDocsRequest2 = dispatcher.takeRequest(MockHelper.PATH_REGEX_BULK_DOCS);
        assertNotNull(bulkDocsRequest2);
        assertBulkDocJsonContainsDoc(bulkDocsRequest2, doc2);
        // cleanup
        stopReplication(replicator);
        server.shutdown();
    } finally {
        RemoteRequestRetry.RETRY_DELAY_MS = previous;
    }
}
/**
 * Verify that when a replication runs into an auth error, it stops
 * and the lastError() method returns that error (a 401 HttpResponseException).
 */
public void testReplicatorErrorStatus() throws Exception {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    // fake _session response
    MockSessionGet mockSessionGet = new MockSessionGet();
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_SESSION, mockSessionGet.generateMockResponse());
    // fake _facebook response
    MockFacebookAuthPost mockFacebookAuthPost = new MockFacebookAuthPost();
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_FACEBOOK_AUTH, mockFacebookAuthPost.generateMockResponse());
    // start mock server
    server.play();
    // register bogus fb token
    Authenticator facebookAuthenticator = AuthenticatorFactory.createFacebookAuthenticator("fake_access_token");
    // run one-shot pull replication with the bogus authenticator
    Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
    pullReplication.setAuthenticator(facebookAuthenticator);
    pullReplication.setContinuous(false);
    runReplication(pullReplication);
    // the replicator must surface the auth failure as a 401 HttpResponseException
    assertNotNull(pullReplication.getLastError());
    assertTrue(pullReplication.getLastError() instanceof HttpResponseException);
    assertEquals(401 /* unauthorized */, ((HttpResponseException) pullReplication.getLastError()).getStatusCode());
    // assert that the replicator sent the requests we expected it to send
    RecordedRequest sessionRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_SESSION); // fixed typo: was sessionReqeust
    assertNotNull(sessionRequest);
    RecordedRequest facebookRequest = dispatcher.takeRequest(MockHelper.PATH_REGEX_FACEBOOK_AUTH);
    assertNotNull(facebookRequest);
    dispatcher.verifyAllRecordedRequestsTaken();
}
/**
 * Build a push replicator via Manager.getReplicator(properties) (REST
 * _replicate-style map), verify its configuration (direction, continuity,
 * custom Cookie header), start it, then look it up again with "cancel":true
 * and stop it, asserting it finishes.
 */
public void testGetReplicator() throws Throwable {
    // create mockwebserver and custom dispatcher
    MockDispatcher dispatcher = new MockDispatcher();
    MockWebServer server = MockHelper.getMockWebServer(dispatcher);
    dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
    // checkpoint PUT or GET response (sticky, delayed so the replication stays observable)
    MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
    mockCheckpointPut.setSticky(true);
    mockCheckpointPut.setDelayMs(500);
    dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
    server.play();
    Map<String,Object> properties = new HashMap<String,Object>();
    properties.put("source", DEFAULT_TEST_DB);
    properties.put("target", server.getUrl("/db").toExternalForm());
    Map<String,Object> headers = new HashMap<String,Object>();
    String cookieVal = "SyncGatewaySession=c38687c2696688a"; // fixed typo: was coolieVal
    headers.put("Cookie", cookieVal);
    properties.put("headers", headers);
    Replication replicator = manager.getReplicator(properties);
    assertNotNull(replicator);
    assertEquals(server.getUrl("/db").toExternalForm(), replicator.getRemoteUrl().toExternalForm());
    assertFalse(replicator.isPull()); // idiom: was assertTrue(!replicator.isPull())
    assertFalse(replicator.isContinuous());
    assertFalse(replicator.isRunning());
    assertTrue(replicator.getHeaders().containsKey("Cookie"));
    assertEquals(cookieVal, replicator.getHeaders().get("Cookie")); // fixed expected/actual order
    // add replication observers BEFORE starting, so no state transition can be missed
    CountDownLatch replicationDoneSignal = new CountDownLatch(1);
    ReplicationFinishedObserver replicationFinishedObserver = new ReplicationFinishedObserver(replicationDoneSignal);
    replicator.addChangeListener(replicationFinishedObserver);
    final CountDownLatch replicationStarted = new CountDownLatch(1);
    replicator.addChangeListener(new ReplicationActiveObserver(replicationStarted));
    // start the replicator
    Log.d(TAG, "Starting replicator " + replicator);
    replicator.start();
    boolean success = replicationStarted.await(30, TimeUnit.SECONDS);
    assertTrue(success);
    // now lets lookup existing replicator and stop it
    Log.d(TAG, "Looking up replicator");
    properties.put("cancel", true);
    Replication activeReplicator = manager.getReplicator(properties);
    Log.d(TAG, "Found replicator " + activeReplicator + " and calling stop()");
    activeReplicator.stop();
    Log.d(TAG, "called stop(), waiting for it to finish");
    // wait for replication to finish
    boolean didNotTimeOut = replicationDoneSignal.await(180, TimeUnit.SECONDS);
    Log.d(TAG, "replicationDoneSignal.await done, didNotTimeOut: " + didNotTimeOut);
    assertTrue(didNotTimeOut);
    assertFalse(activeReplicator.isRunning());
    server.shutdown();
}
/**
 * Verifies that Manager.getReplicator(Map) builds a replicator carrying a
 * FacebookAuthorizer when the target map includes parsed "auth" properties.
 *
 * @throws Throwable on any assertion failure
 */
public void testGetReplicatorWithAuth() throws Throwable {
Map<String,Object> auth = getReplicationAuthParsedJson();
// Target is itself a map: URL plus auth block, mirroring the REST API shape.
Map<String,Object> target = new HashMap<String,Object>();
target.put("url", getReplicationURL().toExternalForm());
target.put("auth", auth);
Map<String,Object> spec = new HashMap<String,Object>();
spec.put("source", DEFAULT_TEST_DB);
spec.put("target", target);
Replication replication = manager.getReplicator(spec);
assertNotNull(replication);
// The auth block must be translated into a Facebook authenticator instance.
assertNotNull(replication.getAuthenticator());
assertTrue(replication.getAuthenticator() instanceof FacebookAuthorizer);
}
/**
 *
 * When the server returns a 409 error to a PUT checkpoint response, make
 * sure it does the right thing:
 * - Pull latest remote checkpoint
 * - Try to push checkpoint again (this time passing latest rev)
 *
 * @throws Exception
 */
public void testPutCheckpoint409Recovery() throws Exception {
// create mockwebserver and custom dispatcher
MockDispatcher dispatcher = new MockDispatcher();
MockWebServer server = MockHelper.getMockWebServer(dispatcher);
dispatcher.setServerType(MockDispatcher.ServerType.SYNC_GW);
// mock documents to be pulled
MockDocumentGet.MockDocument mockDoc1 = new MockDocumentGet.MockDocument("doc1", "1-5e38", 1);
mockDoc1.setJsonMap(MockHelper.generateRandomJsonMap());
// checkpoint GET response w/ 404 — simulates a first-time replication with no
// saved checkpoint on the server
MockResponse fakeCheckpointResponse = new MockResponse();
MockHelper.set404NotFoundJson(fakeCheckpointResponse);
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, fakeCheckpointResponse);
// _changes response
MockChangesFeed mockChangesFeed = new MockChangesFeed();
mockChangesFeed.add(new MockChangesFeed.MockChangedDoc(mockDoc1));
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHANGES, mockChangesFeed.generateMockResponse());
// doc1 response
MockDocumentGet mockDocumentGet = new MockDocumentGet(mockDoc1);
dispatcher.enqueueResponse(mockDoc1.getDocPathRegex(), mockDocumentGet.generateMockResponse());
// respond with 409 error to mock checkpoint PUT — this is the conflict the
// replicator must recover from
MockResponse checkpointResponse409 = new MockResponse();
checkpointResponse409.setStatus("HTTP/1.1 409 CONFLICT");
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, checkpointResponse409);
// the replicator should then try to do a checkpoint GET, and in this case
// it should return a value with a rev id
MockCheckpointGet mockCheckpointGet = new MockCheckpointGet();
mockCheckpointGet.setOk("true");
mockCheckpointGet.setRev("0-1");
mockCheckpointGet.setLastSequence("0");
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointGet);
// the replicator should then try a checkpoint PUT again
// and we should respond with a 201
MockCheckpointPut mockCheckpointPut = new MockCheckpointPut();
dispatcher.enqueueResponse(MockHelper.PATH_REGEX_CHECKPOINT, mockCheckpointPut);
// start mock server
server.play();
// run pull replication
Replication pullReplication = database.createPullReplication(server.getUrl("/db"));
// I had to set this to continuous, because in a one-shot replication it tries to
// save the checkpoint asynchronously as the replicator is shutting down, which
// breaks the retry logic in the case a 409 conflict is returned by server.
pullReplication.setContinuous(true);
pullReplication.start();
// we should have gotten two requests to PATH_REGEX_CHECKPOINT:
// PUT -> 409 Conflict
// PUT -> 201 Created
for (int i=1; i<=2; i++) {
Log.v(TAG, "waiting for PUT checkpoint: %d", i);
waitForPutCheckpointRequestWithSeq(dispatcher, mockDoc1.getDocSeq());
Log.d(TAG, "got PUT checkpoint: %d", i);
}
stopReplication(pullReplication);
server.shutdown();
}
}
|
package com.minelittlepony.client.mixin;
import java.util.Map;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.gen.Accessor;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.client.renderer.entity.RenderPlayer;
/**
 * Accessor mixin exposing RenderManager's internal player-renderer map directly,
 * bypassing the public getter.
 */
@Mixin(RenderManager.class)
public interface MixinRenderManager {
// There is a method to get it, but it's made immutable by Forge.
@Accessor("skinMap")
Map<String, RenderPlayer> getMutableSkinMap();
}
|
package com.github.yftx.AndroidHacks.sectionTipsAndTricks;
import android.app.Activity;
import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import com.github.yftx.AndroidHacks.R;
import java.util.Calendar;
import java.util.Date;
/**
 * Demo activity that renders a wall-clock (HH:MM:SS) into a TextView and logs the
 * view's dimensions before and after layout.
 */
public class GlowTextView extends Activity {
// Clock display pattern: hours, minutes, seconds — each zero-padded to two digits.
private static final String DATE_FORMAT = "%02d:%02d:%02d";
// Refresh twice per second so the displayed second never visibly lags.
private static final int REFRESH_DELAY = 500;
private static final String TAG = GlowTextView.class.getSimpleName();
private final Handler mHandler = new Handler();
// Self-rescheduling task that repaints the current local time.
// Uses Calendar instead of the long-deprecated Date#getHours()/getMinutes()/
// getSeconds(); HOUR_OF_DAY matches getHours()'s 0-23 local-time range.
private final Runnable mTimeRefresher = new Runnable() {
@Override
public void run() {
final Calendar now = Calendar.getInstance();
mTextView.setText(String.format(DATE_FORMAT,
now.get(Calendar.HOUR_OF_DAY),
now.get(Calendar.MINUTE),
now.get(Calendar.SECOND)));
mHandler.postDelayed(this, REFRESH_DELAY);
}
};
private TextView mTextView;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.glow_text);
mTextView = (TextView) findViewById(R.id.main_clock_time);
// Width/height are 0 here: the view has not been laid out yet. The post()ed
// runnable below runs after layout and logs the real dimensions.
Log.d(TAG,"out of post tv width " + mTextView.getWidth() + " height " + mTextView.getHeight());
mTextView.post(new Runnable() {
@Override
public void run() {
Log.d(TAG,"in post tv width " + mTextView.getWidth() + " height " + mTextView.getHeight());
}
});
}
// Wired from the layout via android:onClick; starts the refresh loop.
public void onClickStartClock(View view) {
mHandler.post(mTimeRefresher);
}
@Override
protected void onStop() {
super.onStop();
// Cancel the self-rescheduling refresher so the Handler does not keep the
// activity alive (and keep updating an invisible view) after it stops.
mHandler.removeCallbacks(mTimeRefresher);
}
}
|
package com.headwire.aem.tooling.intellij.explorer;
import com.headwire.aem.tooling.intellij.config.ServerConfiguration;
import com.headwire.aem.tooling.intellij.config.ServerConfigurationManager;
import com.headwire.aem.tooling.intellij.ui.ServerConfigurationDialog;
import com.headwire.aem.tooling.intellij.util.ServerUtil;
import com.intellij.execution.RunManagerAdapter;
import com.intellij.execution.RunManagerEx;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.TreeExpander;
import com.intellij.ide.dnd.FileCopyPasteUtil;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.ActionPlaces;
import com.intellij.openapi.actionSystem.ActionPopupMenu;
import com.intellij.openapi.actionSystem.ActionToolbar;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.CommonShortcuts;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.DataProvider;
import com.intellij.openapi.actionSystem.DefaultActionGroup;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooser;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManagerListener;
import com.intellij.openapi.keymap.ex.KeymapManagerEx;
import com.intellij.openapi.keymap.impl.ui.EditKeymapsDialog;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.SimpleToolWindowPanel;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileAdapter;
import com.intellij.openapi.vfs.VirtualFileCopyEvent;
import com.intellij.openapi.vfs.VirtualFileEvent;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.VirtualFileMoveEvent;
import com.intellij.openapi.vfs.VirtualFilePropertyEvent;
import com.intellij.ui.ColoredTreeCellRenderer;
import com.intellij.ui.DoubleClickListener;
import com.intellij.ui.PopupHandler;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.TreeSpeedSearch;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ArrayUtil;
import com.intellij.util.IconUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.xml.DomEventListener;
import com.intellij.util.xml.DomManager;
import com.intellij.util.xml.events.DomEvent;
import org.apache.sling.ide.impl.vlt.AddOrUpdateNodeCommand;
import org.apache.sling.ide.impl.vlt.VltRepositoryFactory;
import org.apache.sling.ide.transport.Command;
import org.apache.sling.ide.transport.FileInfo;
import org.apache.sling.ide.transport.Repository;
import org.apache.sling.ide.transport.RepositoryException;
import org.apache.sling.ide.transport.RepositoryFactory;
import org.apache.sling.ide.transport.RepositoryInfo;
import org.apache.sling.ide.transport.ResourceProxy;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.io.File;
import java.util.*;
public class SlingServerExplorer
extends SimpleToolWindowPanel implements DataProvider, Disposable
{
private static final Logger LOGGER = Logger.getInstance(SlingServerExplorer.class);
// Path segment that marks the start of JCR-mapped content inside a local file path.
public static final String ROOT_FOLDER = "/jcr_root/";
// Project/UI state; nulled out in dispose().
private Project myProject;
private ServerExplorerTreeBuilder myBuilder;
private Tree myTree;
private KeymapListener myKeymapListener;
// private final AntBuildFilePropertiesAction myAntBuildFilePropertiesAction;
private ServerConfigurationManager myConfig;
// FileVault repository connection created in the constructor; stays null if the
// connection attempt fails, so callers must null-check before use.
private Repository repository;
// Expand/collapse support for the tree; expansion is only offered when at least
// one server configuration exists.
private final TreeExpander myTreeExpander = new TreeExpander() {
public void expandAll() {
myBuilder.expandAll();
}
public boolean canExpand() {
final ServerConfigurationManager config = myConfig;
return config != null && config.getServerConfigurationList().size() != 0;
}
public void collapseAll() {
myBuilder.collapseAll();
}
public boolean canCollapse() {
// Collapsing is allowed exactly when expanding is.
return canExpand();
}
};
/**
 * Builds the explorer panel: tree UI with popup/double-click/ENTER handlers, a
 * toolbar, DOM/run-manager listeners that refresh the tree, a FileVault repository
 * connection, and a VFS listener that pushes content changes under /jcr_root/ to
 * the repository.
 *
 * @param project the project this explorer belongs to
 */
public SlingServerExplorer(final Project project) {
super(true, true);
setTransferHandler(new MyTransferHandler());
myProject = project;
myConfig = ServerConfigurationManager.getInstance(project);
// Tree starts with an empty root; the builder populates it from the config.
final DefaultTreeModel model = new DefaultTreeModel(new DefaultMutableTreeNode());
myTree = new Tree(model);
// myTree.setRootVisible(false);
myTree.setRootVisible(true);
myTree.setShowsRootHandles(true);
myTree.setCellRenderer(new NodeRenderer());
myBuilder = new ServerExplorerTreeBuilder(project, myTree, model);
// myBuilder.setTargetsFiltered(AntConfigurationBase.getInstance(project).isFilterTargets());
TreeUtil.installActions(myTree);
new TreeSpeedSearch(myTree);
// Right-click shows the context popup menu.
myTree.addMouseListener(new PopupHandler() {
public void invokePopup(final Component comp, final int x, final int y) {
popupInvoked(comp, x, y);
}
});
// Double-click on a row runs the selection (only when the click lands inside
// the row's bounds, not in the empty area beside it).
new DoubleClickListener() {
@Override
protected boolean onDoubleClick(MouseEvent e) {
final int eventY = e.getY();
final int row = myTree.getClosestRowForLocation(e.getX(), eventY);
if (row >= 0) {
final Rectangle bounds = myTree.getRowBounds(row);
if (bounds != null && eventY > bounds.getY() && eventY < bounds.getY() + bounds.getHeight()) {
runSelection(DataManager.getInstance().getDataContext(myTree));
return true;
}
}
return false;
}
}.installOn(myTree);
// ENTER also runs the selection.
myTree.registerKeyboardAction(new AbstractAction() {
public void actionPerformed(ActionEvent e) {
runSelection(DataManager.getInstance().getDataContext(myTree));
}
}, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0), WHEN_FOCUSED);
myTree.setLineStyleAngled();
// myAntBuildFilePropertiesAction = new AntBuildFilePropertiesAction(this);
setToolbar(createToolbarPanel());
setContent(ScrollPaneFactory.createScrollPane(myTree));
ToolTipManager.sharedInstance().registerComponent(myTree);
myKeymapListener = new KeymapListener();
// Rebuild the tree whenever the DOM or before-run tasks change.
DomManager.getDomManager(project).addDomEventListener(new DomEventListener() {
public void eventOccured(DomEvent event) {
myBuilder.queueUpdate();
}
}, this);
RunManagerEx.getInstanceEx(myProject).addRunManagerListener(new RunManagerAdapter() {
public void beforeRunTasksChanged() {
myBuilder.queueUpdate();
}
});
// Create FileVault Repository Access.
// NOTE(review): credentials and host are hardcoded (admin/admin @ localhost:4502)
// instead of coming from the ServerConfigurationManager — confirm intended.
LOGGER.debug("Before Create Repository Info");
RepositoryInfo repositoryInfo = new RepositoryInfo("admin", "admin", "http://localhost:4502/");
LOGGER.debug("After Create Repository Info: " + repositoryInfo);
RepositoryFactory factory = new VltRepositoryFactory();
LOGGER.debug("After Creating Repository Factory: " + factory);
try {
repository = factory.connectRepository(repositoryInfo);
LOGGER.debug("After Creating Repository: " + repository);
} catch (RepositoryException e) {
// Connection failure is logged and tolerated; `repository` stays null and
// the VFS listener below skips pushing changes.
LOGGER.error("Failed to connect to VLT Repository", e);
}
// VFS listener scoped to the project (disposed with it): on content change of a
// file under ROOT_FOLDER, push an add-or-update node command to the repository.
VirtualFileManager.getInstance().addVirtualFileListener(new VirtualFileAdapter() {
@Override
public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
LOGGER.debug("VFS Property Changed Event: " + event.getFileName());
}
@Override
public void contentsChanged(@NotNull VirtualFileEvent event) {
String fileName = event.getFileName();
LOGGER.debug("VFS Content Changed Event: " + fileName);
String filePath = event.getFile().getPath();
int index = filePath.indexOf(ROOT_FOLDER);
// index > 0 also means a path starting with ROOT_FOLDER is ignored —
// NOTE(review): presumably intentional (root must be nested); confirm.
if(index > 0) {
// JCR path keeps the leading "/" by subtracting 1 from the marker length.
String jcrPath = filePath.substring(index + ROOT_FOLDER.length() - 1);
LOGGER.debug("Supported JCR Path: " + jcrPath);
if(repository != null) {
ResourceProxy resource = new ResourceProxy(jcrPath);
resource.addProperty("jcr:primaryType", "nt:unstructured");
FileInfo info = new FileInfo(
filePath, jcrPath, fileName
);
LOGGER.debug("Before Create Command");
Command<Void> cmd = repository.newAddOrUpdateNodeCommand(info, resource);
LOGGER.debug("Before Execute Create Command: " + cmd);
// NOTE(review): execute()'s result is ignored, so command failures
// are silent here — confirm whether the result should be checked.
cmd.execute();
LOGGER.debug("After Execute Create Command: " + cmd);
}
}
}
@Override
public void fileCreated(@NotNull VirtualFileEvent event) {
LOGGER.debug("VFS File Created Event: " + event.getFileName());
}
@Override
public void fileDeleted(@NotNull VirtualFileEvent event) {
LOGGER.debug("VFS File Deleted Event: " + event.getFileName());
}
@Override
public void fileMoved(@NotNull VirtualFileMoveEvent event) {
LOGGER.debug("VFS File Moved Event: " + event.getFileName());
}
@Override
public void fileCopied(@NotNull VirtualFileCopyEvent event) {
LOGGER.debug("VFS File Copied Event: " + event.getFileName());
}
}, project);
}
/**
 * Tears the explorer down: stops the keymap listener, disposes the tree builder,
 * unregisters tooltip and keyboard handlers from the tree, and nulls all fields so
 * nothing retains the project.
 */
public void dispose() {
// Detach the keymap listener first so no UI refreshes fire during teardown.
final KeymapListener keymapListener = myKeymapListener;
if (keymapListener != null) {
myKeymapListener = null;
keymapListener.stopListen();
}
// Dispose the builder through Disposer so its registered children go with it.
final ServerExplorerTreeBuilder treeBuilder = myBuilder;
if (treeBuilder != null) {
Disposer.dispose(treeBuilder);
myBuilder = null;
}
// Unregister tooltips and every keyboard action before dropping the tree.
final Tree explorerTree = myTree;
if (explorerTree != null) {
ToolTipManager.sharedInstance().unregisterComponent(explorerTree);
for (KeyStroke stroke : explorerTree.getRegisteredKeyStrokes()) {
explorerTree.unregisterKeyboardAction(stroke);
}
myTree = null;
}
myProject = null;
// myConfig = null;
}
/**
 * Builds the explorer toolbar hosting the add/remove/run actions.
 *
 * @return a panel wrapping the action toolbar component
 */
private JPanel createToolbarPanel() {
final DefaultActionGroup actions = new DefaultActionGroup();
actions.add(new AddAction());
actions.add(new RemoveAction());
actions.add(new RunAction());
final ActionToolbar toolbar =
ActionManager.getInstance().createActionToolbar(ActionPlaces.ANT_EXPLORER_TOOLBAR, actions, true);
final JPanel panel = new JPanel(new BorderLayout());
panel.add(toolbar.getComponent(), BorderLayout.CENTER);
return panel;
}
/**
 * Stub: intended to prompt for and register a new server configuration. The
 * Ant-derived file-chooser flow below is disabled; AddAction currently opens the
 * configuration dialog directly instead.
 */
private void addServerConfiguration() {
// final FileChooserDescriptor descriptor = createXmlDescriptor();
//// descriptor.setTitle(AntBundle.message("select.ant.build.file.dialog.title"));
//// descriptor.setDescription(AntBundle.message("select.ant.build.file.dialog.description"));
// final VirtualFile[] files = FileChooser.chooseFiles(descriptor, myProject, null);
// addBuildFile(files);
}
/**
 * Legacy Ant-explorer entry point kept as a shell: schedules work on the EDT for a
 * non-empty file array, but the entire body of the scheduled runnable is disabled
 * (commented-out Ant build-file registration), so this currently has no effect
 * beyond the invokeLater call itself.
 *
 * @param files candidate build files; an empty array is a no-op
 */
private void addBuildFile(final VirtualFile[] files) {
if (files.length == 0) {
return;
}
ApplicationManager.getApplication().invokeLater(new Runnable() {
public void run() {
// final AntConfiguration antConfiguration = myConfig;
// if (antConfiguration == null) {
// return;
// final java.util.List<VirtualFile> ignoredFiles = new ArrayList<VirtualFile>();
// for (VirtualFile file : files) {
// try {
// antConfiguration.addBuildFile(file);
// catch (AntNoFileException e) {
// ignoredFiles.add(e.getFile());
// if (ignoredFiles.size() != 0) {
// String messageText;
// final StringBuilder message = StringBuilderSpinAllocator.alloc();
// try {
// String separator = "";
// for (final VirtualFile virtualFile : ignoredFiles) {
// message.append(separator);
// message.append(virtualFile.getPresentableUrl());
// separator = "\n";
// messageText = message.toString();
// finally {
// StringBuilderSpinAllocator.dispose(message);
// Messages.showWarningDialog(myProject, messageText, AntBundle.message("cannot.add.ant.files.dialog.title"));
}
});
}
/**
 * Stub: the Ant build-file removal flow (confirmation dialog + config update) is
 * entirely commented out, so calling this is currently a no-op. RemoveAction still
 * invokes it.
 */
public void removeBuildFile() {
// final AntBuildFile buildFile = getCurrentBuildFile();
// if (buildFile == null) {
// return;
// final String fileName = buildFile.getPresentableUrl();
// final int result = Messages.showYesNoDialog(myProject, AntBundle.message("remove.the.reference.to.file.confirmation.text", fileName),
// AntBundle.message("confirm.remove.dialog.title"), Messages.getQuestionIcon());
// if (result != Messages.YES) {
// return;
// myConfig.removeBuildFile(buildFile);
}
/**
 * Stub: editing build-file properties is disabled; the body is fully commented
 * out and the method is currently a no-op.
 */
public void setBuildFileProperties() {
// final AntBuildFileBase buildFile = getCurrentBuildFile();
// if (buildFile != null && BuildFilePropertiesPanel.editBuildFile(buildFile, myProject)) {
// myConfig.updateBuildFile(buildFile);
// myBuilder.queueUpdate();
// myTree.repaint();
}
/**
 * Invoked on double-click / ENTER in the tree. Guards on canRunSelection() and
 * then does nothing: the Ant build-execution body is commented out, so this is
 * currently a no-op beyond the guard.
 *
 * @param dataContext context of the invoking UI event (currently unused)
 */
private void runSelection(final DataContext dataContext) {
if (!canRunSelection()) {
return;
}
// final AntBuildFileBase buildFile = getCurrentBuildFile();
// if (buildFile != null) {
// final TreePath[] paths = myTree.getSelectionPaths();
// final String[] targets = getTargetNamesFromPaths(paths);
// ExecutionHandler.runBuild(buildFile, targets, null, dataContext, Collections.<BuildFileProperty>emptyList(), AntBuildListener.NULL);
}
/**
 * A selection is runnable when the tree exists and at least one path is selected.
 * The historical per-node Ant build-file checks remain disabled, so any selection
 * qualifies.
 *
 * @return true if the tree has a non-null selection
 */
private boolean canRunSelection() {
final Tree tree = myTree;
if (tree == null) {
return false;
}
return tree.getSelectionPaths() != null;
}
/**
 * Legacy helper: was meant to collect Ant target names from selected tree paths.
 * The target-extraction logic is commented out, so the list is never populated and
 * this always returns an empty array; the loop only fetches (and discards) each
 * node's user object.
 *
 * @param paths selected tree paths (must be non-null)
 * @return currently always an empty array
 */
private static String[] getTargetNamesFromPaths(TreePath[] paths) {
final java.util.List<String> targets = new ArrayList<String>();
for (final TreePath path : paths) {
// userObject is computed but unused while the extraction below is disabled.
final Object userObject = ((DefaultMutableTreeNode)path.getLastPathComponent()).getUserObject();
// if (!(userObject instanceof AntTargetNodeDescriptor)) {
// continue;
// final AntBuildTarget target = ((AntTargetNodeDescriptor)userObject).getTarget();
// if (target instanceof MetaTarget) {
// ContainerUtil.addAll(targets, ((MetaTarget) target).getTargetNames());
// else {
// targets.add(target.getName());
}
return ArrayUtil.toStringArray(targets);
}
// private static AntBuildTarget[] getTargetObjectsFromPaths(TreePath[] paths) {
// final java.util.List<AntBuildTargetBase> targets = new ArrayList<AntBuildTargetBase>();
// for (final TreePath path : paths) {
// final Object userObject = ((DefaultMutableTreeNode)path.getLastPathComponent()).getUserObject();
// if (!(userObject instanceof AntTargetNodeDescriptor)) {
// continue;
// final AntBuildTargetBase target = ((AntTargetNodeDescriptor)userObject).getTarget();
// targets.add(target);
// return targets.toArray(new AntBuildTargetBase[targets.size()]);
/**
 * Build-file selection tracking is disabled in this explorer, so nothing is ever
 * considered selected.
 *
 * @return always false
 */
public boolean isBuildFileSelected() {
return false;
}
// @Nullable
// private AntBuildFileBase getCurrentBuildFile() {
// final AntBuildFileNodeDescriptor descriptor = getCurrentBuildFileNodeDescriptor();
// return (AntBuildFileBase)((descriptor == null) ? null : descriptor.getBuildFile());
// @Nullable
// private AntBuildFileNodeDescriptor getCurrentBuildFileNodeDescriptor() {
// if (myTree == null) {
// return null;
// final TreePath path = myTree.getSelectionPath();
// if (path == null) {
// return null;
// DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
// while (node != null) {
// final Object userObject = node.getUserObject();
// if (userObject instanceof AntBuildFileNodeDescriptor) {
// return (AntBuildFileNodeDescriptor)userObject;
// node = (DefaultMutableTreeNode)node.getParent();
// return null;
/**
 * Shows the explorer context menu at the given component coordinates. The menu
 * always contains Run, Make-Run-Configuration and Edit Source; the node-dependent
 * Ant entries are disabled.
 *
 * @param comp component the popup is anchored to
 * @param x    popup x coordinate
 * @param y    popup y coordinate
 */
private void popupInvoked(final Component comp, final int x, final int y) {
// userObject is resolved but currently unused — only the commented-out
// node-type-specific menu entries below consumed it.
Object userObject = null;
final TreePath path = myTree.getSelectionPath();
if (path != null) {
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
if (node != null) {
userObject = node.getUserObject();
}
}
final DefaultActionGroup group = new DefaultActionGroup();
group.add(new RunAction());
// group.add(new CreateMetaTargetAction());
group.add(new MakeAntRunConfigurationAction());
// group.add(new RemoveMetaTargetsOrBuildFileAction());
group.add(ActionManager.getInstance().getAction(IdeActions.ACTION_EDIT_SOURCE));
// if (userObject instanceof AntBuildFileNodeDescriptor) {
// group.add(new RemoveBuildFileAction(this));
// if (userObject instanceof AntTargetNodeDescriptor) {
// final AntBuildTargetBase target = ((AntTargetNodeDescriptor)userObject).getTarget();
// final DefaultActionGroup executeOnGroup =
// new DefaultActionGroup(AntBundle.message("ant.explorer.execute.on.action.group.name"), true);
// executeOnGroup.add(new ExecuteOnEventAction(target, ExecuteBeforeCompilationEvent.getInstance()));
// executeOnGroup.add(new ExecuteOnEventAction(target, ExecuteAfterCompilationEvent.getInstance()));
// executeOnGroup.addSeparator();
// executeOnGroup.add(new ExecuteBeforeRunAction(target));
// group.add(executeOnGroup);
// group.add(new AssignShortcutAction(target.getActionId()));
// group.add(myAntBuildFilePropertiesAction);
final ActionPopupMenu popupMenu = ActionManager.getInstance().createActionPopupMenu(ActionPlaces.ANT_EXPLORER_POPUP, group);
popupMenu.getComponent().show(comp, x, y);
}
/**
 * DataProvider hook. Supplies the tree expander when asked; all other keys
 * (navigatable, help id, virtual files, PSI elements) are either disabled or
 * delegated to the superclass.
 *
 * @param dataId the data key being requested
 * @return the requested object, or null when unavailable
 */
@Nullable
public Object getData(@NonNls String dataId) {
if (CommonDataKeys.NAVIGATABLE.is(dataId)) {
// final AntBuildFile buildFile = getCurrentBuildFile();
// if (buildFile == null) {
// return null;
// final VirtualFile file = buildFile.getVirtualFile();
// if (file == null) {
// return null;
final TreePath treePath = myTree.getLeadSelectionPath();
if (treePath == null) {
return null;
}
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)treePath.getLastPathComponent();
if (node == null) {
return null;
}
// if (node.getUserObject() instanceof AntTargetNodeDescriptor) {
// final AntTargetNodeDescriptor targetNodeDescriptor = (AntTargetNodeDescriptor)node.getUserObject();
// final AntBuildTargetBase buildTarget = targetNodeDescriptor.getTarget();
// final OpenFileDescriptor descriptor = buildTarget.getOpenFileDescriptor();
// if (descriptor != null) {
// final VirtualFile descriptorFile = descriptor.getFile();
// if (descriptorFile.isValid()) {
// return descriptor;
// if (file.isValid()) {
// return new OpenFileDescriptor(myProject, file);
// NOTE: with the navigation code above disabled, a non-null node falls
// through to super.getData(dataId) at the bottom of this method.
}
else if (PlatformDataKeys.HELP_ID.is(dataId)) {
// return HelpID.ANT;
return null;
}
else if (PlatformDataKeys.TREE_EXPANDER.is(dataId)) {
// Only expose the expander while the component is attached to a project.
return myProject != null? myTreeExpander : null;
}
else if (CommonDataKeys.VIRTUAL_FILE_ARRAY.is(dataId)) {
// final java.util.List<VirtualFile> virtualFiles = collectAntFiles(new Function<AntBuildFile, VirtualFile>() {
// @Override
// public VirtualFile fun(AntBuildFile buildFile) {
// final VirtualFile virtualFile = buildFile.getVirtualFile();
// if (virtualFile != null && virtualFile.isValid()) {
// return virtualFile;
// return null;
// return virtualFiles == null ? null : virtualFiles.toArray(new VirtualFile[virtualFiles.size()]);
return null;
}
else if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) {
// final java.util.List<PsiElement> elements = collectAntFiles(new Function<AntBuildFile, PsiElement>() {
// @Override
// public PsiElement fun(AntBuildFile buildFile) {
// return buildFile.getAntFile();
// return elements == null ? null : elements.toArray(new PsiElement[elements.size()]);
return null;
}
return super.getData(dataId);
}
// private <T> java.util.List<T> collectAntFiles(final Function<AntBuildFile, T> function) {
// final TreePath[] paths = myTree.getSelectionPaths();
// if (paths == null) {
// return null;
// Set<AntBuildFile> antFiles = new LinkedHashSet<AntBuildFile>();
// for (final TreePath path : paths) {
// for (DefaultMutableTreeNode node = (DefaultMutableTreeNode)path.getLastPathComponent();
// node != null;
// node = (DefaultMutableTreeNode)node.getParent()) {
// final Object userObject = node.getUserObject();
// if (!(userObject instanceof AntBuildFileNodeDescriptor)) {
// continue;
// final AntBuildFile buildFile = ((AntBuildFileNodeDescriptor)userObject).getBuildFile();
// if (buildFile != null) {
// antFiles.add(buildFile);
// break;
// final java.util.List<T> result = new ArrayList<T>();
// ContainerUtil.addAllNotNull(result, ContainerUtil.map(antFiles, new Function<AntBuildFile, T>() {
// @Override
// public T fun(AntBuildFile buildFile) {
// return function.fun(buildFile);
// return result.isEmpty() ? null : result;
/**
 * Builds a file-chooser descriptor that accepts single XML files (and lets the
 * user descend into directories).
 *
 * @return descriptor showing directories plus XML files only
 */
public static FileChooserDescriptor createXmlDescriptor() {
return new FileChooserDescriptor(true, false, false, false, false, true) {
public boolean isFileVisible(VirtualFile file, boolean showHiddenFiles) {
boolean visible = super.isFileVisible(file, showHiddenFiles);
if (!file.isDirectory()) {
// Non-short-circuit & keeps the original's unconditional getFileType()
// evaluation for non-directories.
visible = visible & StdFileTypes.XML.equals(file.getFileType());
}
return visible;
}
};
}
/**
 * Tree cell renderer: server nodes render through their descriptor; anything else
 * falls back to the tree's plain text conversion.
 */
private static final class NodeRenderer extends ColoredTreeCellRenderer {
public void customizeCellRenderer(JTree tree,
Object value,
boolean selected,
boolean expanded,
boolean leaf,
int row,
boolean hasFocus) {
final Object nodeData = ((DefaultMutableTreeNode)value).getUserObject();
LOGGER.debug("Node Renderer: user object: " + nodeData);
if (nodeData instanceof ServerNodeDescriptor) {
((ServerNodeDescriptor)nodeData).customize(this);
return;
}
// Fallback: render the node exactly as the tree would display it.
append(tree.convertValueToText(value, selected, expanded, leaf, row, hasFocus),
SimpleTextAttributes.REGULAR_ATTRIBUTES);
}
}
/**
 * Toolbar action: opens the server configuration dialog and, when confirmed,
 * registers the new configuration and repaints the tree.
 */
private final class AddAction extends AnAction {
public AddAction() {
super("Add Action", "Add a New Server Configuration", IconUtil.getAddIcon());
}
public void actionPerformed(AnActionEvent e) {
final ServerConfigurationDialog dialog = new ServerConfigurationDialog(e.getProject());
// A cancelled dialog registers nothing.
if (dialog.showAndGet()) {
myConfig.getServerConfigurationList().add(dialog.getConfiguration());
myTree.repaint();
}
}
}
/**
 * Toolbar action that delegates to removeBuildFile() — currently a no-op since
 * that method's body is disabled. Enablement logic is also disabled, so the
 * action is always enabled.
 */
private final class RemoveAction extends AnAction {
public RemoveAction() {
// super(AntBundle.message("remove.ant.file.action.name"), AntBundle.message("remove.ant.file.action.description"),
// IconUtil.getRemoveIcon());
super("Remove Action", "Description", IconUtil.getRemoveIcon());
}
public void actionPerformed(AnActionEvent e) {
removeBuildFile();
}
public void update(AnActionEvent event) {
// event.getPresentation().setEnabled(getCurrentBuildFile() != null);
}
}
/**
 * Toolbar/popup action intended to hot-swap a class on the remote server.
 * actionPerformed() is still a placeholder (the outlined JDI steps are not yet
 * implemented), and update() unconditionally enables the action.
 */
private final class RunAction extends AnAction {
public RunAction() {
// super(AntBundle.message("run.ant.file.or.target.action.name"), AntBundle.message("run.ant.file.or.target.action.description"),
// AllIcons.Actions.Execute);
super("Run Action", "Hot Swap Class", AllIcons.Actions.Execute);
}
public void actionPerformed(AnActionEvent e) {
// runSelection(e.getDataContext());
// Planned hot-swap sequence (not implemented yet):
// Create a Connection
// Create a Virtual Machine
// Load the Compiled Class
// Obtain the Reference Type
// Redefine the Class on the Remote Server
}
public void update(AnActionEvent event) {
final Presentation presentation = event.getPresentation();
// final String place = event.getPlace();
// if (ActionPlaces.ANT_EXPLORER_TOOLBAR.equals(place)) {
//// presentation.setText(AntBundle.message("run.ant.file.or.target.action.name"));
// else {
// final TreePath[] paths = myTree.getSelectionPaths();
// if (paths != null && paths.length == 1) {
// Object temp = ((DefaultMutableTreeNode)paths[0].getLastPathComponent()).getUserObject();
// LOGGER.debug("Selected User Object: '{}'", temp);
// if(temp instanceof SlingServerNodeDescriptor) {
// SlingServerNodeDescriptor node = (SlingServerNodeDescriptor) temp;
// ServerConfiguration serverConfiguration = node.getTarget();
// ServerUtil.connectRepository(serverConfiguration);
// } else {
// LOGGER.debug("Selected object is not a Server Configuration but: '{}'", temp);
//// presentation.setText(AntBundle.message("run.ant.build.action.name"));
//// else {
//// if (paths == null || paths.length == 1) {
//// presentation.setText(AntBundle.message("run.ant.target.action.name"));
//// else {
//// presentation.setText(AntBundle.message("run.ant.targets.action.name"));
// presentation.setEnabled(canRunSelection());
// Selection-based enablement is disabled; the action is always available.
presentation.setEnabled(true);
}
}
// Action intended to create a run configuration from the selected target.
// Only update() is live; actionPerformed() is a stub whose original Ant
// implementation is kept below as a porting reference.
private final class MakeAntRunConfigurationAction extends AnAction {
public MakeAntRunConfigurationAction() {
// super(AntBundle.message("make.ant.runconfiguration.name"), null, AntIcons.Build);
super("Make / Run Action", "Description", null);
}
@Override
public void update(AnActionEvent e) {
super.update(e);
final Presentation presentation = e.getPresentation();
// Enabled only for a single, runnable selection.
presentation.setEnabled(myTree.getSelectionCount() == 1 && canRunSelection());
}
// Stub — no behavior yet.
@Override
public void actionPerformed(AnActionEvent e) {
// final AntBuildFile buildFile = getCurrentBuildFile();
// if (buildFile == null || !buildFile.exists()) {
// return;
// TreePath selectionPath = myTree.getSelectionPath();
// if (selectionPath == null) return;
// final DefaultMutableTreeNode node = (DefaultMutableTreeNode) selectionPath.getLastPathComponent();
// final Object userObject = node.getUserObject();
// AntBuildTarget target = null;
// if (userObject instanceof AntTargetNodeDescriptor) {
// AntTargetNodeDescriptor targetNodeDescriptor = (AntTargetNodeDescriptor)userObject;
// target = targetNodeDescriptor.getTarget();
// else if (userObject instanceof AntBuildFileNodeDescriptor){
// AntBuildModel model = ((AntBuildFileNodeDescriptor)userObject).getBuildFile().getModel();
// target = model.findTarget(model.getDefaultTargetName());
// String name = target != null ? target.getDisplayName() : null;
// if (target == null || name == null) {
// return;
// RunManagerImpl runManager = (RunManagerImpl) RunManager.getInstance(e.getProject());
// RunnerAndConfigurationSettings settings =
// runManager.createRunConfiguration(name, AntRunConfigurationType.getInstance().getFactory());
// AntRunConfiguration configuration = (AntRunConfiguration)settings.getConfiguration();
// configuration.acceptSettings(target);
// if (RunDialog.editConfiguration(e.getProject(), settings, ExecutionBundle
// .message("create.run.configuration.for.item.dialog.title", configuration.getName()))) {
// runManager.addConfiguration(settings,
// runManager.isConfigurationShared(settings),
// runManager.getBeforeRunTasks(settings.getConfiguration()), false);
// runManager.setSelectedConfiguration(settings);
}
}
/**
 * Toggle that filters the tree to show all targets or only a subset.
 * The toggle state is not persisted yet (isSelected always reports false);
 * the original Ant-explorer persistence code is kept below as a reference.
 */
private final class ShowAllTargetsAction extends ToggleAction {
    public ShowAllTargetsAction() {
        // super(AntBundle.message("filter.ant.targets.action.name"), AntBundle.message("filter.ant.targets.action.description"),
        //       AllIcons.General.Filter);
        // Fixed typos in the user-visible presentation text
        // ("Show ALl Taget Action" -> "Show All Targets Action").
        super("Show All Targets Action", "Description", null);
    }

    public boolean isSelected(AnActionEvent event) {
        // final Project project = myProject;
        // return project != null? AntConfigurationBase.getInstance(project).isFilterTargets() : false;
        return false;
    }

    public void setSelected(AnActionEvent event, boolean flag) {
        setTargetsFiltered(flag);
    }
}
// Forwards the target-filtering toggle to the tree builder. Persisting the
// flag (as the Ant explorer did via AntConfigurationBase) is still disabled.
private void setTargetsFiltered(boolean value) {
myBuilder.setTargetsFiltered(value);
// AntConfigurationBase.getInstance(myProject).setFilterTargets(value);
}
// private final class ExecuteOnEventAction extends ToggleAction {
// private final AntBuildTargetBase myTarget;
// private final ExecutionEvent myExecutionEvent;
// public ExecuteOnEventAction(final AntBuildTargetBase target, final ExecutionEvent executionEvent) {
// super(executionEvent.getPresentableName());
// myTarget = target;
// myExecutionEvent = executionEvent;
// public boolean isSelected(AnActionEvent e) {
// return myTarget.equals(AntConfigurationBase.getInstance(myProject).getTargetForEvent(myExecutionEvent));
// public void setSelected(AnActionEvent event, boolean state) {
// final AntConfigurationBase antConfiguration = AntConfigurationBase.getInstance(myProject);
// if (state) {
// final AntBuildFileBase buildFile =
// (AntBuildFileBase)((myTarget instanceof MetaTarget) ? ((MetaTarget)myTarget).getBuildFile() : myTarget.getModel().getBuildFile());
// antConfiguration.setTargetForEvent(buildFile, myTarget.getName(), myExecutionEvent);
// else {
// antConfiguration.clearTargetForEvent(myExecutionEvent);
// myBuilder.queueUpdate();
// public void update(AnActionEvent e) {
// super.update(e);
// final AntBuildFile buildFile = myTarget.getModel().getBuildFile();
// e.getPresentation().setEnabled(buildFile != null && buildFile.exists());
// private final class ExecuteBeforeRunAction extends AnAction {
// private final AntBuildTarget myTarget;
// public ExecuteBeforeRunAction(final AntBuildTarget target) {
// super(AntBundle.message("executes.before.run.debug.acton.name"));
// myTarget = target;
// public void actionPerformed(AnActionEvent e) {
// final AntExecuteBeforeRunDialog dialog = new AntExecuteBeforeRunDialog(myProject, myTarget);
// dialog.show();
// public void update(AnActionEvent e) {
// e.getPresentation().setEnabled(myTarget.getModel().getBuildFile().exists());
// Action intended to combine several selected targets into a single "meta"
// target. actionPerformed() is a stub; the original Ant implementation is
// kept below as a porting reference.
private final class CreateMetaTargetAction extends AnAction {
public CreateMetaTargetAction() {
// super(AntBundle.message("ant.create.meta.target.action.name"), AntBundle.message("ant.create.meta.target.action.description"), null
super("Create Meta Action", "Description", null);
/*IconLoader.getIcon("/actions/execute.png")*/
}
// Stub — no behavior yet.
public void actionPerformed(AnActionEvent e) {
// final AntBuildFile buildFile = getCurrentBuildFile();
// final String[] targets = getTargetNamesFromPaths(myTree.getSelectionPaths());
// final ExecuteCompositeTargetEvent event = new ExecuteCompositeTargetEvent(targets);
// final SaveMetaTargetDialog dialog = new SaveMetaTargetDialog(myTree, event, AntConfigurationBase.getInstance(myProject), buildFile);
// dialog.setTitle(e.getPresentation().getText());
// if (dialog.showAndGet()) {
// myBuilder.queueUpdate();
// myTree.repaint();
}
public void update(AnActionEvent e) {
final TreePath[] paths = myTree.getSelectionPaths();
// Only meaningful when more than one runnable target is selected.
e.getPresentation().setEnabled(paths != null && paths.length > 1 && canRunSelection());
}
}
// Removes the selected meta targets or the selected build file. The
// constructor wires the platform Delete shortcut and a raw Swing DEL key
// binding to doAction(), and unregisters the shortcut when the explorer is
// disposed. Much of the removal/update logic is still commented out from the
// original Ant explorer.
private final class RemoveMetaTargetsOrBuildFileAction extends AnAction {
public RemoveMetaTargetsOrBuildFileAction() {
// super(AntBundle.message("remove.meta.targets.action.name"), AntBundle.message("remove.meta.targets.action.description"), null);
super("Remove Meta Action", "Description", null);
registerCustomShortcutSet(CommonShortcuts.getDelete(), myTree);
// Unregister the shortcut when the enclosing explorer is disposed.
Disposer.register(SlingServerExplorer.this, new Disposable() {
public void dispose() {
RemoveMetaTargetsOrBuildFileAction.this.unregisterCustomShortcutSet(myTree);
}
});
// Also bind the plain DEL key directly on the tree component.
myTree.registerKeyboardAction(new AbstractAction() {
public void actionPerformed(ActionEvent e) {
doAction();
}
}, KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), WHEN_ANCESTOR_OF_FOCUSED_COMPONENT);
}
public void actionPerformed(AnActionEvent e) {
doAction();
}
// Performs the removal for the current selection; the tree is refreshed in
// the finally block regardless of what (if anything) was removed.
private void doAction() {
final TreePath[] paths = myTree.getSelectionPaths();
if (paths == null) {
return;
}
try {
// try to remove build file
if (paths.length == 1) {
// NOTE(review): 'node' is currently unused because the removal logic
// below is still commented out.
final DefaultMutableTreeNode node = (DefaultMutableTreeNode)paths[0].getLastPathComponent();
// if (node.getUserObject() instanceof AntBuildFileNodeDescriptor) {
// final AntBuildFileNodeDescriptor descriptor = (AntBuildFileNodeDescriptor)node.getUserObject();
// if (descriptor.getBuildFile().equals(getCurrentBuildFile())) {
// removeBuildFile();
// return;
}
// try to remove meta targets
// final AntBuildTarget[] targets = getTargetObjectsFromPaths(paths);
// final AntConfigurationBase antConfiguration = AntConfigurationBase.getInstance(myProject);
// for (final AntBuildTarget buildTarget : targets) {
// if (buildTarget instanceof MetaTarget) {
// for (final ExecutionEvent event : antConfiguration.getEventsForTarget(buildTarget)) {
// if (event instanceof ExecuteCompositeTargetEvent) {
// antConfiguration.clearTargetForEvent(event);
}
finally {
myBuilder.queueUpdate();
myTree.repaint();
}
}
// Enables the action only when the selection is removable; the detailed
// text/enablement logic from the Ant explorer is not ported yet.
public void update(AnActionEvent e) {
final Presentation presentation = e.getPresentation();
final TreePath[] paths = myTree.getSelectionPaths();
if (paths == null) {
presentation.setEnabled(false);
return;
}
if (paths.length == 1) {
// String text = AntBundle.message("remove.meta.target.action.name");
// boolean enabled = false;
// final DefaultMutableTreeNode node = (DefaultMutableTreeNode)paths[0].getLastPathComponent();
// if (node.getUserObject() instanceof AntBuildFileNodeDescriptor) {
// final AntBuildFileNodeDescriptor descriptor = (AntBuildFileNodeDescriptor)node.getUserObject();
// if (descriptor.getBuildFile().equals(getCurrentBuildFile())) {
// text = AntBundle.message("remove.selected.build.file.action.name");
// enabled = true;
// else {
// if (node.getUserObject() instanceof AntTargetNodeDescriptor) {
// final AntTargetNodeDescriptor descr = (AntTargetNodeDescriptor)node.getUserObject();
// final AntBuildTargetBase target = descr.getTarget();
// if (target instanceof MetaTarget) {
// enabled = true;
// presentation.setText(text);
// presentation.setEnabled(enabled);
}
else {
// presentation.setText(AntBundle.message("remove.selected.meta.targets.action.name"));
// final AntBuildTarget[] targets = getTargetObjectsFromPaths(paths);
// boolean enabled = targets.length > 0;
// for (final AntBuildTarget buildTarget : targets) {
// if (!(buildTarget instanceof MetaTarget)) {
// enabled = false;
// break;
// presentation.setEnabled(enabled);
}
}
}
/**
 * Opens the keymap editor pre-focused on the given action id so the user can
 * assign a shortcut to it.
 */
private final class AssignShortcutAction extends AnAction {
    private final String myActionId;

    public AssignShortcutAction(String actionId) {
        // super(AntBundle.message("ant.explorer.assign.shortcut.action.name"));
        super("Assign Shortcut Action", "Description", null);
        myActionId = actionId;
    }

    public void actionPerformed(AnActionEvent e) {
        new EditKeymapsDialog(myProject, myActionId).show();
    }

    public void update(AnActionEvent e) {
        // Only enabled when the id actually resolves to a registered action.
        final boolean hasRegisteredAction =
                myActionId != null && ActionManager.getInstance().getAction(myActionId) != null;
        e.getPresentation().setEnabled(hasRegisteredAction);
    }
}
/**
 * Tracks the active keymap and rebuilds the tree whenever the keymap itself or
 * one of its shortcuts changes, so displayed shortcut hints stay current.
 */
private class KeymapListener implements KeymapManagerListener, Keymap.Listener {
    // Keymap whose shortcut changes we are currently subscribed to (null = none).
    private Keymap myCurrentKeymap = null;

    public KeymapListener() {
        final KeymapManagerEx keymapManager = KeymapManagerEx.getInstanceEx();
        listenTo(keymapManager.getActiveKeymap());
        keymapManager.addKeymapManagerListener(this);
    }

    public void activeKeymapChanged(Keymap keymap) {
        listenTo(keymap);
        rebuildTree();
    }

    // Re-points the shortcut-change subscription at the given keymap.
    private void listenTo(Keymap keymap) {
        final Keymap previous = myCurrentKeymap;
        if (previous != null) {
            previous.removeShortcutChangeListener(this);
        }
        myCurrentKeymap = keymap;
        if (keymap != null) {
            keymap.addShortcutChangeListener(this);
        }
    }

    private void rebuildTree() {
        myBuilder.updateFromRoot();
    }

    public void onShortcutChanged(String actionId) {
        rebuildTree();
    }

    // Detaches from both the current keymap and the keymap manager.
    public void stopListen() {
        listenTo(null);
        KeymapManagerEx.getInstanceEx().removeKeymapManagerListener(this);
    }
}
/**
 * Accepts file drag-and-drop onto the explorer tree and registers the dropped
 * files as build files.
 */
private final class MyTransferHandler extends TransferHandler {
    @Override
    public boolean importData(final TransferSupport support) {
        if (!canImport(support)) {
            return false;
        }
        addBuildFile(getAntFiles(support));
        return true;
    }

    @Override
    public boolean canImport(final TransferSupport support) {
        // Only file-list flavored transfers are accepted.
        return FileCopyPasteUtil.isFileListFlavorAvailable(support.getDataFlavors());
    }

    // Resolves the dropped java.io.File list to VirtualFiles, silently
    // skipping entries that cannot be found in the VFS.
    private VirtualFile[] getAntFiles(final TransferSupport support) {
        final java.util.List<VirtualFile> result = new ArrayList<VirtualFile>();
        final java.util.List<File> droppedFiles = FileCopyPasteUtil.getFileList(support.getTransferable());
        if (droppedFiles != null) {
            for (final File droppedFile : droppedFiles) {
                ContainerUtil.addIfNotNull(result, VfsUtil.findFileByIoFile(droppedFile, true));
            }
        }
        return VfsUtil.toVirtualFileArray(result);
    }
}
}
|
package com.pauldavdesign.mineauz.minigames.blockRecorder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.BlockState;
import org.bukkit.block.BrewingStand;
import org.bukkit.block.Chest;
import org.bukkit.block.Dispenser;
import org.bukkit.block.DoubleChest;
import org.bukkit.block.Furnace;
import org.bukkit.block.Sign;
import org.bukkit.entity.Animals;
import org.bukkit.entity.Arrow;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.block.Action;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockBurnEvent;
import org.bukkit.event.block.BlockFromToEvent;
import org.bukkit.event.block.BlockIgniteEvent;
import org.bukkit.event.block.BlockIgniteEvent.IgniteCause;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.block.BlockSpreadEvent;
import org.bukkit.event.block.LeavesDecayEvent;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.hanging.HangingBreakByEntityEvent;
import org.bukkit.event.hanging.HangingPlaceEvent;
import org.bukkit.event.player.PlayerBucketEmptyEvent;
import org.bukkit.event.player.PlayerBucketFillEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.vehicle.VehicleCreateEvent;
import org.bukkit.event.vehicle.VehicleDestroyEvent;
import org.bukkit.event.world.StructureGrowEvent;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;

import com.pauldavdesign.mineauz.minigames.Minigame;
import com.pauldavdesign.mineauz.minigames.MinigameUtils;
import com.pauldavdesign.mineauz.minigames.Minigames;
import com.pauldavdesign.mineauz.minigames.PlayerData;
public class RecorderData implements Listener{
private static Minigames plugin;
private PlayerData pdata;
private Minigame minigame;
private boolean whitelistMode = false;
private List<Material> wbBlocks = new ArrayList<Material>();
private Map<String, BlockData> blockdata;
private Map<Integer, EntityData> entdata;
// Creates a recorder for the given minigame and registers it as a Bukkit
// event listener so world modifications made during the game are captured.
public RecorderData(Minigame minigame){
// NOTE(review): 'plugin' is a static field re-assigned on every construction;
// fine while Minigames.plugin is a singleton, but worth confirming.
plugin = Minigames.plugin;
pdata = plugin.pdata;
this.minigame = minigame;
blockdata = new HashMap<String, BlockData>();
entdata = new HashMap<Integer, EntityData>();
plugin.getServer().getPluginManager().registerEvents(this, plugin);
}
/** Switches the block filter between whitelist (true) and blacklist (false) mode. */
public void setWhitelistMode(boolean bool){
    this.whitelistMode = bool;
}
/** @return true when the block filter operates as a whitelist. */
public boolean getWhitelistMode(){
    return this.whitelistMode;
}
/** Adds a material to the white/blacklist used to filter block modifications. */
public void addWBBlock(Material mat){
    this.wbBlocks.add(mat);
}
/**
 * Returns the white/blacklist of materials.
 * Note: this is the live internal list, so callers can mutate it directly.
 */
public List<Material> getWBBlocks(){
    return this.wbBlocks;
}
/**
 * Removes a material from the white/blacklist.
 *
 * @param mat material to remove
 * @return true if the material was present and removed, false otherwise
 */
public boolean removeWBBlock(Material mat){
    // List.remove already reports whether the element was present, so the
    // previous contains()+remove() double lookup was redundant.
    return wbBlocks.remove(mat);
}
/** @return the minigame this recorder belongs to. */
public Minigame getMinigame(){
    return this.minigame;
}
/**
 * Records the current state of a block (including any container contents) so
 * it can be restored when the minigame ends.
 *
 * @param block    block being modified
 * @param modifier player responsible for the change, or null for natural causes
 * @return the stored (new or pre-existing) BlockData entry for this location
 */
public BlockData addBlock(Block block, Player modifier){
    BlockData bdata = new BlockData(block, modifier);
    // Key format "x:y:z" — must match hasBlock().
    String sloc = String.valueOf(bdata.getLocation().getBlockX()) + ":" + bdata.getLocation().getBlockY() + ":" + bdata.getLocation().getBlockZ();
    if(!blockdata.containsKey(sloc)){
        ItemStack[] items = null;
        BlockState state = block.getState();
        // Snapshot container contents. Chests, furnaces, brewing stands and
        // dispensers all expose their inventory through InventoryHolder, so a
        // single check replaces the old per-material branches. The previous
        // "block instanceof DoubleChest" test could never be true (a Block is
        // not a DoubleChest), so double chests were already being captured via
        // their plain Chest state — behavior for chests is unchanged.
        if(state instanceof InventoryHolder){
            ItemStack[] contents = ((InventoryHolder) state).getInventory().getContents();
            items = new ItemStack[contents.length];
            for(int i = 0; i < contents.length; i++){
                if(contents[i] != null){
                    // Deep-copy so later changes to the live inventory cannot
                    // corrupt the snapshot.
                    items[i] = contents[i].clone();
                }
            }
        }
        bdata.setItems(items);
        blockdata.put(sloc, bdata);
        return bdata;
    }
    // Location already recorded: keep the original snapshot, just update who
    // touched it last.
    BlockData existing = blockdata.get(sloc);
    existing.setModifier(modifier);
    return existing;
}
/**
 * Records a block-state snapshot (e.g. the state replaced by a block-place
 * event), including container contents, for later restoration.
 *
 * @param block    state of the block before the modification
 * @param modifier player responsible for the change, or null for natural causes
 */
public void addBlock(BlockState block, Player modifier){
    BlockData bdata = new BlockData(block, modifier);
    // Key format "x:y:z" — must match hasBlock().
    String sloc = String.valueOf(bdata.getLocation().getBlockX()) + ":" + bdata.getLocation().getBlockY() + ":" + bdata.getLocation().getBlockZ();
    if(!blockdata.containsKey(sloc)){
        ItemStack[] items = null;
        // Container states (Chest, Furnace, BrewingStand, Dispenser) all
        // implement InventoryHolder, which subsumes the old per-material
        // branches. The previous "block instanceof DoubleChest" test was dead
        // code — a BlockState is never a DoubleChest — so this preserves the
        // effective chest behavior.
        if(block instanceof InventoryHolder){
            ItemStack[] contents = ((InventoryHolder) block).getInventory().getContents();
            items = new ItemStack[contents.length];
            for(int i = 0; i < contents.length; i++){
                if(contents[i] != null){
                    // Deep-copy to decouple the snapshot from the live inventory.
                    items[i] = contents[i].clone();
                }
            }
        }
        bdata.setItems(items);
        blockdata.put(sloc, bdata);
    }
    else{
        // Keep the first snapshot; only remember the most recent modifier.
        blockdata.get(sloc).setModifier(modifier);
    }
}
/** Stores an entity snapshot, keyed by entity id, for later restoration. */
public void addEntity(Entity ent, Player player, boolean created){
    entdata.put(ent.getEntityId(), new EntityData(ent, player, created));
}
/**
 * @param ent entity to look up
 * @return true if this recorder holds a snapshot for the entity
 */
public boolean hasEntity(Entity ent){
    // containsKey already yields the boolean; the if/else was redundant.
    return entdata.containsKey(ent.getEntityId());
}
/**
 * @param block block to look up
 * @return true if this recorder holds a snapshot for the block's location
 */
public boolean hasBlock(Block block){
    // Key format "x:y:z" — must match the key built in addBlock().
    String sloc = String.valueOf(block.getLocation().getBlockX()) + ":" + block.getLocation().getBlockY() + ":" + block.getLocation().getBlockZ();
    return blockdata.containsKey(sloc);
}
/**
 * Restores every recorded block to its pre-game state and clears the log.
 * Container inventories are emptied immediately; the block type/data and the
 * snapshotted contents are reapplied on the next server tick.
 */
public void restoreBlocks(){
for(String id : blockdata.keySet()){
final BlockData bdata = blockdata.get(id);
// Clear container inventories up front so stale contents cannot leak while
// the block itself is being reverted.
// NOTE(review): "getState() instanceof DoubleChest" appears to always be
// false (a chest's state is a Chest, not a DoubleChest) — confirm; the else
// branch handles chests either way.
if(bdata.getLocation().getBlock().getType() == Material.CHEST){
if(bdata.getLocation().getBlock().getState() instanceof DoubleChest){
DoubleChest dchest = (DoubleChest) bdata.getLocation().getBlock().getState();
dchest.getInventory().clear();
}
else{
Chest chest = (Chest) bdata.getLocation().getBlock().getState();
chest.getInventory().clear();
}
}
else if(bdata.getLocation().getBlock().getType() == Material.FURNACE){
Furnace furnace = (Furnace) bdata.getLocation().getBlock().getState();
furnace.getInventory().clear();
}
else if(bdata.getLocation().getBlock().getType() == Material.DISPENSER){
Dispenser dispenser = (Dispenser) bdata.getLocation().getBlock().getState();
dispenser.getInventory().clear();
}
else if(bdata.getLocation().getBlock().getType() == Material.BREWING_STAND){
BrewingStand stand = (BrewingStand) bdata.getLocation().getBlock().getState();
stand.getInventory().clear();
}
// Defer the actual rollback one tick via the scheduler — presumably so all
// clears complete before types/data are reapplied; TODO confirm intent.
Bukkit.getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
@Override
public void run() {
// Reapply the original material and raw data value.
bdata.getLocation().getBlock().setType(bdata.getBlockState().getType());
bdata.getLocation().getBlock().setData(bdata.getBlockState().getRawData());
// Refill container contents from the snapshot, if one was taken.
if(bdata.getLocation().getBlock().getType() == Material.CHEST){
if(bdata.getLocation().getBlock().getState() instanceof DoubleChest){
DoubleChest dchest = (DoubleChest) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
dchest.getInventory().setContents(bdata.getItems().clone());
}
}
else{
Chest chest = (Chest) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
chest.getInventory().setContents(bdata.getItems().clone());
}
}
}
else if(bdata.getLocation().getBlock().getType() == Material.FURNACE){
Furnace furnace = (Furnace) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
furnace.getInventory().setContents(bdata.getItems().clone());
}
}
else if(bdata.getLocation().getBlock().getType() == Material.BREWING_STAND){
BrewingStand bstand = (BrewingStand) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
bstand.getInventory().setContents(bdata.getItems().clone());
}
}
else if(bdata.getLocation().getBlock().getType() == Material.DISPENSER){
Dispenser dispenser = (Dispenser) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
dispenser.getInventory().setContents(bdata.getItems().clone());
}
}
}
});
}
blockdata.clear();
}
/**
 * Restores all recorded entities: entities spawned during the game are
 * removed, and recorded entities that have since died are re-spawned at their
 * original location. Clears the entity log afterwards.
 */
public void restoreEntities(){
    // Iterate the values directly instead of repeated entdata.get(entID)
    // lookups per key (same order of visits, no behavior change).
    for(EntityData edata : entdata.values()){
        if(edata.getEntity().isValid()){
            // Entity still exists; if it appeared during the game, despawn it.
            if(edata.wasCreated()){
                edata.getEntity().remove();
            }
        }
        else{
            // Original entity is gone; respawn its type at the recorded spot.
            edata.getEntityLocation().getWorld().spawnEntity(edata.getEntityLocation(),
                    edata.getEntityType());
        }
    }
    entdata.clear();
}
/**
 * Restores only the blocks recorded against the given player, immediately
 * (no scheduler deferral, unlike the no-arg overload), and drops the restored
 * entries from the log.
 */
public void restoreBlocks(Player modifier){
List<String> changes = new ArrayList<String>();
for(String id : blockdata.keySet()){
BlockData bdata = blockdata.get(id);
// NOTE(review): identity comparison (==) on Player — OK if Player objects
// are canonical per logged-in player, but confirm against how modifiers are
// stored.
if(bdata.getModifier() == modifier){
// Empty container inventories before reverting the block itself.
if(bdata.getLocation().getBlock().getType() == Material.CHEST){
if(bdata.getLocation().getBlock().getState() instanceof DoubleChest){
DoubleChest dchest = (DoubleChest) bdata.getLocation().getBlock().getState();
dchest.getInventory().clear();
}
else{
Chest chest = (Chest) bdata.getLocation().getBlock().getState();
chest.getInventory().clear();
}
}
else if(bdata.getLocation().getBlock().getType() == Material.FURNACE){
Furnace furnace = (Furnace) bdata.getLocation().getBlock().getState();
furnace.getInventory().clear();
}
else if(bdata.getLocation().getBlock().getType() == Material.DISPENSER){
Dispenser dispenser = (Dispenser) bdata.getLocation().getBlock().getState();
dispenser.getInventory().clear();
}
else if(bdata.getLocation().getBlock().getType() == Material.BREWING_STAND){
BrewingStand stand = (BrewingStand) bdata.getLocation().getBlock().getState();
stand.getInventory().clear();
}
// Reapply original material (skipped when unchanged) and raw data value.
if(bdata.getLocation().getBlock().getType() != bdata.getBlockState().getType()){
bdata.getLocation().getBlock().setType(bdata.getBlockState().getType());
}
bdata.getLocation().getBlock().setData(bdata.getBlockState().getRawData());
// Deferred removal: can't remove from blockdata while iterating its key set.
changes.add(id);
// Refill container contents from the snapshot, if one was taken.
if(bdata.getLocation().getBlock().getType() == Material.CHEST){
if(bdata.getLocation().getBlock().getState() instanceof DoubleChest){
DoubleChest dchest = (DoubleChest) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
dchest.getInventory().setContents(bdata.getItems().clone());
}
}
else{
Chest chest = (Chest) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
chest.getInventory().setContents(bdata.getItems().clone());
}
}
}
else if(bdata.getLocation().getBlock().getType() == Material.FURNACE){
Furnace furnace = (Furnace) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
furnace.getInventory().setContents(bdata.getItems().clone());
}
}
else if(bdata.getLocation().getBlock().getType() == Material.BREWING_STAND){
BrewingStand bstand = (BrewingStand) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
bstand.getInventory().setContents(bdata.getItems().clone());
}
}
else if(bdata.getLocation().getBlock().getType() == Material.DISPENSER){
Dispenser dispenser = (Dispenser) bdata.getLocation().getBlock().getState();
if(bdata.getItems() != null){
dispenser.getInventory().setContents(bdata.getItems().clone());
}
}
}
}
for(String id : changes){
blockdata.remove(id);
}
}
/**
 * Restores only the entities recorded against the given player: removes ones
 * the player spawned that are still alive, and respawns recorded entities of
 * the player's that have since died. Restored entries are dropped from the log.
 */
public void restoreEntities(Player player){
// Deferred removal: can't remove from entdata while iterating its key set.
List<Integer> removal = new ArrayList<Integer>();
for(Integer entID : entdata.keySet()){
// NOTE(review): identity comparison (==) on Player — confirm Player objects
// are canonical. Also note an entity that was created during the game and
// has since died (isValid() == false) falls into the respawn branch below —
// confirm that re-creating it is intended.
if(entdata.get(entID).getEntity().isValid() && entdata.get(entID).getModifier() == player){
if(entdata.get(entID).wasCreated()){
entdata.get(entID).getEntity().remove();
removal.add(entID);
}
}
else if(entdata.get(entID).getModifier() == player){
entdata.get(entID).getEntityLocation().getWorld().spawnEntity(entdata.get(entID).getEntityLocation(),
entdata.get(entID).getEntityType());
removal.add(entID);
}
}
for(Integer entID : removal){
entdata.remove(entID);
}
}
/** @return true if any block or entity changes are currently recorded. */
public boolean hasData(){
    // Direct boolean expression instead of the if/return-false/return-true form.
    return !blockdata.isEmpty() || !entdata.isEmpty();
}
// public boolean checkBlockSides(Location location){
// Location temp = location.clone();
// temp.setX(temp.getX() - 4);
// temp.setY(temp.getY() - 4);
// temp.setZ(temp.getZ() - 4);
// for(int y = 0; y < 8; y++){
// for(int x = 0; x < 8; x++){
// for(int z = 0; z < 8; z++){
// if(hasBlock(temp.getBlock())){
// return true;
// temp.setZ(temp.getZ() + 1);
// if(hasBlock(temp.getBlock())){
// return true;
// temp.setZ(temp.getZ() - 8);
// temp.setX(temp.getX() + 1);
// temp.setX(temp.getX() - 8);
// temp.setY(temp.getY() + 1);
// return false;
/**
 * Returns true if any recorded block lies in a small neighborhood around the
 * given location (used to decide whether naturally-propagating changes such
 * as fire, liquid flow or leaf decay should also be recorded).
 *
 * NOTE(review): the scan is asymmetric — the loop bounds and the extra
 * hasBlock check after the inner z-loop mean offsets z in {-1,0,+1} are
 * visited but x and y only cover {-1,0}; +1 on the x and y axes is never
 * checked. This looks unintentional (the commented-out predecessor above
 * scanned a full cube) — confirm before relying on it.
 */
public boolean checkBlockSides(Location location){
Location temp = location.clone();
temp.setX(temp.getX() - 1);
temp.setY(temp.getY() - 1);
temp.setZ(temp.getZ() - 1);
for(int y = 0; y < 2; y++){
for(int x = 0; x < 2; x++){
for(int z = 0; z < 2; z++){
if(hasBlock(temp.getBlock())){
return true;
}
temp.setZ(temp.getZ() + 1);
}
// Extra check at the z=+1 position the inner loop stopped on.
if(hasBlock(temp.getBlock())){
return true;
}
temp.setZ(temp.getZ() - 2);
temp.setX(temp.getX() + 1);
}
temp.setX(temp.getX() - 2);
temp.setY(temp.getY() + 1);
}
return false;
}
/**
 * Handles block breaks by players inside this minigame. Minigame join signs
 * (first line "[Minigame]" in dark blue) are protected; any other permitted
 * break is recorded for later restoration. Breaks are cancelled outright when
 * the material filter or the minigame's break setting forbids them.
 */
@EventHandler(priority = EventPriority.HIGH)
private void blockBreak(BlockBreakEvent event){
Player ply = event.getPlayer();
if(pdata.playerInMinigame(ply) && pdata.getPlayersMinigame(ply).equals(minigame.getName())){
// Whitelist mode: only listed materials may break; blacklist mode: only
// unlisted ones.
if(((whitelistMode && getWBBlocks().contains(event.getBlock().getType())) ||
(!whitelistMode && !getWBBlocks().contains(event.getBlock().getType()))) &&
minigame.canBlockBreak()){
if(event.getBlock().getState() instanceof Sign){
Sign sign = (Sign) event.getBlock().getState();
// Protect minigame join signs from being broken.
if(sign.getLine(0).equalsIgnoreCase(ChatColor.DARK_BLUE + "[Minigame]")){
event.setCancelled(true);
}
else{
addBlock(event.getBlock(), ply);
// When drops are disabled, cancel the vanilla break and clear the
// block manually so no item entity spawns.
if(!minigame.canBlocksdrop()){
event.setCancelled(true);
event.getBlock().setType(Material.AIR);
}
}
}
else{
Location above = event.getBlock().getLocation().clone();
above.setY(above.getY() + 1);
addBlock(event.getBlock(), ply);
// Gravity-affected blocks above will fall once this block is gone, so
// record them too.
if(above.getBlock().getType() == Material.GRAVEL ||
above.getBlock().getType() == Material.SAND ||
above.getBlock().getType() == Material.ANVIL ||
above.getBlock().getType() == Material.DRAGON_EGG){
addBlock(above.getBlock(), ply);
}
if(!minigame.canBlocksdrop()){
event.setCancelled(true);
event.getBlock().setType(Material.AIR);
}
}
}
else{
event.setCancelled(true);
}
}
}
/**
 * Records the replaced block state when a player in this minigame places a
 * block, or cancels the placement when the material filter or the minigame's
 * place setting forbids it.
 */
@EventHandler(priority = EventPriority.HIGH)
private void blockPlace(BlockPlaceEvent event){
    Player ply = event.getPlayer();
    if(!pdata.playerInMinigame(ply) || !pdata.getPlayersMinigame(ply).equals(minigame.getName()) || event.isCancelled()){
        return;
    }
    // Whitelist mode permits only listed materials; blacklist mode only
    // unlisted ones — i.e. allowed exactly when mode and membership agree.
    boolean listed = getWBBlocks().contains(event.getBlock().getType());
    if(whitelistMode == listed && minigame.canBlockPlace()){
        // Remember what was there before so it can be restored afterwards.
        addBlock(event.getBlockReplacedState(), ply);
    }
    else{
        event.setCancelled(true);
    }
}
/**
 * Snapshots a container's contents the first time a player in this minigame
 * right-clicks it, so looted items can be restored later. For double chests,
 * the "left" half is located via the chest's facing data value so both halves
 * key to the same recorded block; chest contents may also be randomized when
 * the minigame requests it.
 */
@EventHandler
private void takeItem(PlayerInteractEvent event){
// NOTE(review): the (Player) cast is redundant — getPlayer() already
// returns Player.
Player ply = (Player) event.getPlayer();
if(pdata.playerInMinigame(ply) && pdata.getPlayersMinigame(ply).equals(minigame.getName()) && event.getAction() == Action.RIGHT_CLICK_BLOCK
&& !minigame.isSpectator(ply)){
if(event.getClickedBlock().getType() == Material.CHEST){
Chest chest = (Chest) event.getClickedBlock().getState();
// Inventory larger than a single chest (27 slots) => double chest.
if(chest.getInventory().getSize() > 27){
Location loc = event.getClickedBlock().getLocation().clone();
boolean isLeft = false;
//West = -z; East = +z; North = +x; South = -x;
// Data values 0x2..0x5 are the pre-1.13 chest facing bytes; presumably
// they identify which neighbor holds the other chest half — TODO confirm
// the offsets against the facing table above.
if(!isLeft && event.getClickedBlock().getData() == 0x2){
loc.setX(loc.getX() + 1);
if(loc.getBlock().getType() == Material.CHEST){
Bukkit.getLogger().info(MinigameUtils.createLocationID(loc));
isLeft = true;
}
else{
loc = event.getClickedBlock().getLocation().clone();
}
}
else if(!isLeft && event.getClickedBlock().getData() == 0x3){
loc.setX(loc.getX() - 1);
if(loc.getBlock().getType() == Material.CHEST){
Bukkit.getLogger().info(MinigameUtils.createLocationID(loc));
isLeft = true;
}
else{
loc = event.getClickedBlock().getLocation().clone();
}
}
else if(!isLeft && event.getClickedBlock().getData() == 0x4){
loc.setZ(loc.getZ() - 1);
if(loc.getBlock().getType() == Material.CHEST){
Bukkit.getLogger().info(MinigameUtils.createLocationID(loc));
isLeft = true;
}
else{
loc = event.getClickedBlock().getLocation().clone();
}
}
else if(!isLeft && event.getClickedBlock().getData() == 0x5){
loc.setZ(loc.getZ() + 1);
if(loc.getBlock().getType() == Material.CHEST){
Bukkit.getLogger().info(MinigameUtils.createLocationID(loc));
isLeft = true;
}
else{
loc = event.getClickedBlock().getLocation().clone();
}
}
// Record the resolved chest block and optionally shuffle its loot.
BlockData bdata = addBlock(loc.getBlock(), ply);
if(minigame.isRandomizeChests()){
bdata.randomizeContents(minigame.getMinChestRandom(), minigame.getMaxChestRandom());
}
}
// Single chest.
else if(event.getClickedBlock().getState() instanceof Chest){
BlockData bdata = addBlock(event.getClickedBlock().getLocation().getBlock(), ply);
if(minigame.isRandomizeChests()){
bdata.randomizeContents(minigame.getMinChestRandom(), minigame.getMaxChestRandom());
}
}
}
// Other containers: snapshot only, no randomization.
else if(event.getClickedBlock().getType() == Material.FURNACE){
addBlock(event.getClickedBlock().getLocation().getBlock(), ply);
}
else if(event.getClickedBlock().getType() == Material.BREWING_STAND){
addBlock(event.getClickedBlock().getLocation().getBlock(), ply);
}
else if(event.getClickedBlock().getType() == Material.DISPENSER){
addBlock(event.getClickedBlock().getLocation().getBlock(), ply);
}
}
}
// @EventHandler
// private void blockPhysics(BlockPhysicsEvent event){
// if((event.getBlock().getType() == Material.GRAVEL ||
// event.getBlock().getType() == Material.SAND ||
// event.getBlock().getType() == Material.ANVIL ||
// event.getBlock().getType() == Material.DRAGON_EGG) &&
// checkBlockSides(event.getBlock().getLocation())){
// addBlock(event.getBlock(), null);
/** Records leaves that decay next to an already-recorded block. */
@EventHandler
private void leafDecay(LeavesDecayEvent event){
    if(!checkBlockSides(event.getBlock().getLocation())){
        return;
    }
    addBlock(event.getBlock(), null);
}
/** Records every block changed by a structure growing on recorded ground. */
@EventHandler
private void treeGrow(StructureGrowEvent event){
    if(!hasBlock(event.getLocation().getBlock())){
        return;
    }
    for(BlockState changed : event.getBlocks()){
        addBlock(changed.getLocation().getBlock(), event.getPlayer());
    }
}
/**
 * Records liquid pickup (bucket fill) by players inside this minigame, or
 * cancels it when the material filter or break setting forbids it.
 */
@EventHandler
private void bucketFill(PlayerBucketFillEvent event){
    Player ply = event.getPlayer();
    if(!pdata.playerInMinigame(ply) || !pdata.getPlayersMinigame(ply).equals(minigame.getName())){
        return;
    }
    // Allowed exactly when filter mode and list membership agree.
    boolean listed = getWBBlocks().contains(event.getBlockClicked().getType());
    if(whitelistMode == listed && minigame.canBlockBreak()){
        addBlock(event.getBlockClicked(), ply);
    }
    else{
        event.setCancelled(true);
    }
}
@EventHandler
private void bucketEmpty(PlayerBucketEmptyEvent event){
    Player player = event.getPlayer();
    // Only moderate players that are inside this specific minigame.
    if(!pdata.playerInMinigame(player) || !pdata.getPlayersMinigame(player).equals(minigame.getName())){
        return;
    }
    boolean listed = getWBBlocks().contains(event.getBlockClicked().getType());
    if((whitelistMode == listed) && minigame.canBlockPlace()){
        // The liquid is placed on the clicked face, not in the clicked block,
        // so offset by the face's unit normal before recording it.
        Location placed = new Location(event.getBlockClicked().getWorld(),
                event.getBlockFace().getModX() + event.getBlockClicked().getX(),
                event.getBlockFace().getModY() + event.getBlockClicked().getY(),
                event.getBlockFace().getModZ() + event.getBlockClicked().getZ());
        addBlock(placed.getBlock(), player);
    }
    else{
        event.setCancelled(true);
    }
}
@EventHandler
public void blockFromTo(BlockFromToEvent event){
    // Flow events: when the source block is in-region, record the destination
    // block (the one whose state actually changes).
    if(!checkBlockSides(event.getBlock().getLocation())) return;
    addBlock(event.getToBlock(), null);
}
@EventHandler
public void blockBurn(BlockBurnEvent event){
    // Record blocks destroyed by fire when adjacent to tracked territory.
    if(!checkBlockSides(event.getBlock().getLocation())) return;
    addBlock(event.getBlock(), null);
}
@EventHandler
public void fireSpread(BlockSpreadEvent event){
    // Spread (e.g. fire) from an already-tracked source taints the new block.
    if(!hasBlock(event.getSource())) return;
    addBlock(event.getBlock(), null);
}
@EventHandler
public void igniteblock(BlockIgniteEvent event){
    Player player = event.getPlayer();
    // Only player-caused ignitions in this minigame are moderated; other
    // causes (lightning, spread, …) fall through untouched.
    if(player == null || !pdata.playerInMinigame(player) ||
            !pdata.getPlayersMinigame(player).equals(minigame.getName())){
        return;
    }
    if(event.getCause() != IgniteCause.FIREBALL && event.getCause() != IgniteCause.FLINT_AND_STEEL){
        return;
    }
    boolean fireListed = getWBBlocks().contains(Material.FIRE);
    if((whitelistMode == fireListed) && minigame.canBlockPlace()){
        addBlock(event.getBlock(), player);
    }
    else{
        event.setCancelled(true);
    }
}
@EventHandler
private void vehicleCreate(VehicleCreateEvent event){
    // Attribute the freshly-created vehicle to the first nearby player who is
    // currently in this minigame.
    for(Entity nearby : event.getVehicle().getNearbyEntities(8, 8, 8)){
        if(!(nearby instanceof Player)) continue;
        Player player = (Player) nearby;
        if(plugin.pdata.playerInMinigame(player) &&
                plugin.mdata.getMinigame(plugin.pdata.getPlayersMinigame(player)).equals(minigame)){
            addEntity(event.getVehicle(), player, true);
            break;
        }
    }
}
@EventHandler
private void vehicleDestroy(VehicleDestroyEvent event){
    if(event.getAttacker() instanceof Player){
        // Destroyed directly by a player: credit the attacker if they are in
        // this minigame and the vehicle is not yet tracked.
        Player attacker = (Player) event.getAttacker();
        if(plugin.pdata.playerInMinigame(attacker) &&
                plugin.mdata.getMinigame(plugin.pdata.getPlayersMinigame(attacker)).equals(minigame) &&
                !hasEntity(event.getVehicle())){
            addEntity(event.getVehicle(), attacker, false);
        }
    }
    else if(event.getAttacker() == null){
        // No attacker (environmental destruction): credit the first nearby
        // player who is in this minigame.
        for(Entity nearby : event.getVehicle().getNearbyEntities(15, 15, 15)){
            if(!(nearby instanceof Player)) continue;
            Player player = (Player) nearby;
            if(plugin.pdata.playerInMinigame(player) &&
                    plugin.mdata.getMinigame(plugin.pdata.getPlayersMinigame(player)).equals(minigame)){
                if(!hasEntity(event.getVehicle())){
                    addEntity(event.getVehicle(), player, false);
                }
                break;
            }
        }
    }
}
@EventHandler
private void animalDeath(EntityDamageByEntityEvent event){
    if(!(event.getEntity() instanceof Animals)){
        return;
    }
    Animals victim = (Animals) event.getEntity();
    // Only record the hit that is about to be fatal.
    if(victim.getHealth() > event.getDamage()){
        return;
    }
    // Resolve the responsible player: direct melee, or the shooter of an arrow.
    Player killer = null;
    if(event.getDamager() instanceof Player){
        killer = (Player) event.getDamager();
    }
    else if(event.getDamager() instanceof Arrow){
        Arrow arrow = (Arrow) event.getDamager();
        if(arrow.getShooter() instanceof Player){
            killer = (Player) arrow.getShooter();
        }
    }
    if(killer != null && plugin.pdata.playerInMinigame(killer) &&
            plugin.mdata.getMinigame(plugin.pdata.getPlayersMinigame(killer)).equals(minigame)){
        addEntity(victim, killer, false);
    }
}
@EventHandler
private void paintingPlace(HangingPlaceEvent event){
    Player player = event.getPlayer();
    if(!plugin.pdata.playerInMinigame(player) ||
            !plugin.mdata.getMinigame(plugin.pdata.getPlayersMinigame(player)).equals(minigame)){
        return;
    }
    // Allowed when either hanging material passes the white/black-list check
    // (whitelistMode == contains(...) collapses both list modes).
    boolean paintingAllowed = whitelistMode == getWBBlocks().contains(Material.PAINTING);
    boolean frameAllowed = whitelistMode == getWBBlocks().contains(Material.ITEM_FRAME);
    if(paintingAllowed || frameAllowed){
        addEntity(event.getEntity(), player, true);
    }
    else{
        event.setCancelled(true);
    }
}
@EventHandler
private void paintingBreak(HangingBreakByEntityEvent event){
    // Resolve the responsible player: direct break, or the shooter of an arrow.
    Player remover = null;
    if(event.getRemover() instanceof Player){
        remover = (Player) event.getRemover();
    }
    else if(event.getRemover() instanceof Arrow){
        Arrow arrow = (Arrow) event.getRemover();
        if(arrow.getShooter() instanceof Player){
            remover = (Player) arrow.getShooter();
        }
    }
    // Players may not break hangings while inside this minigame.
    if(remover != null && plugin.pdata.playerInMinigame(remover) &&
            plugin.mdata.getMinigame(plugin.pdata.getPlayersMinigame(remover)).equals(minigame)){
        event.setCancelled(true);
    }
}
// @EventHandler
// private void blockForm(EntityBlockFormEvent event){
// String idloc = MinigameUtils.createLocationID(event.getBlock().getLocation());
// int y = event.getBlock().getY();
// int x = event.getBlock().getX();
// int z = event.getBlock().getZ();
// String world = event.getBlock().getWorld().getName();
// while(y < 256){
// idloc = x + ":" + y + ":" + z + ":" + world;
// Bukkit.getLogger().info(idloc);
// if(blockdata.containsKey(idloc)){
// addBlock(event.getBlock().getLocation().getBlock(), null);
// return;
}
|
package com.dmdirc.parser.irc;
import com.dmdirc.parser.common.MyInfo;
import com.dmdirc.parser.common.ChannelListModeItem;
import com.dmdirc.parser.common.ParserError;
import com.dmdirc.harness.parser.TestIPrivateCTCP;
import com.dmdirc.harness.parser.TestParser;
import com.dmdirc.harness.parser.TestIConnectError;
import com.dmdirc.harness.parser.TestINoticeAuth;
import com.dmdirc.harness.parser.TestINumeric;
import com.dmdirc.harness.parser.TestIServerError;
import com.dmdirc.harness.parser.TestIPost005;
import com.dmdirc.harness.parser.TestIPrivateMessage;
import com.dmdirc.harness.parser.TestIPrivateAction;
import com.dmdirc.parser.interfaces.callbacks.AuthNoticeListener;
import com.dmdirc.parser.common.CallbackNotFoundException;
import com.dmdirc.parser.interfaces.callbacks.CallbackInterface;
import com.dmdirc.parser.interfaces.callbacks.ChannelKickListener;
import com.dmdirc.parser.interfaces.callbacks.ConnectErrorListener;
import com.dmdirc.parser.interfaces.callbacks.ErrorInfoListener;
import com.dmdirc.parser.interfaces.callbacks.NumericListener;
import com.dmdirc.parser.interfaces.callbacks.Post005Listener;
import com.dmdirc.parser.interfaces.callbacks.PrivateActionListener;
import com.dmdirc.parser.interfaces.callbacks.PrivateCtcpListener;
import com.dmdirc.parser.interfaces.callbacks.PrivateMessageListener;
import com.dmdirc.parser.interfaces.callbacks.ServerErrorListener;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import javax.net.ssl.TrustManager;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
public class IRCParserTest {
private static interface TestCallback extends CallbackInterface { }
@Test
public void testIssue42() {
    // Regression: addCallback() must reject unknown callback interfaces
    // instead of silently ignoring them.
    boolean threw = false;
    try {
        final IRCParser myParser = new IRCParser();
        myParser.getCallbackManager().addCallback(TestCallback.class, mock(TestCallback.class));
    } catch (CallbackNotFoundException ex) {
        threw = true;
    }
    assertTrue("addCallback() should throw exception for non-existant callbacks", threw);
}
@Test
public void testIssue1674() {
    // Regression: a duplicated 001 registration plus a nick change must not
    // produce a parser error.
    final ErrorInfoListener errors = mock(ErrorInfoListener.class);
    final TestParser p = new TestParser();
    p.getCallbackManager().addCallback(ErrorInfoListener.class, errors);
    p.injectConnectionStrings();
    p.nick = "nick2";
    p.injectConnectionStrings();
    p.injectLine(":nick2!ident@host NICK :nick");
    verify(errors, never()).onErrorInfo((IRCParser) anyObject(), (ParserError) anyObject());
}
@Test
public void testProxyPortWithBindIP() {
    // An out-of-range SOCKS proxy port must surface as a connect error even
    // when a bind address is configured.
    final TestIConnectError connectError = new TestIConnectError();
    final ServerInfo info = new ServerInfo();
    info.setProxyPort(155555);
    info.setUseSocks(true);
    final IRCParser p = new IRCParser(info);
    p.getCallbackManager().addCallback(ConnectErrorListener.class, connectError);
    p.setBindIP("0.0.0.0");
    p.run();
    assertTrue("Using an invalid socks proxy port should raise a connect error event",
            connectError.error);
}
@Test
public void testTokeniser() {
    // tokeniseLine(): splits on spaces, a ":"-prefixed token (past the first
    // position) swallows the rest of the line, and null yields one empty token.
    assertTrue(Arrays.equals(IRCParser.tokeniseLine("a b c d e"),
            new String[]{"a", "b", "c", "d", "e"}));
    assertTrue(Arrays.equals(IRCParser.tokeniseLine("a b c :d e"),
            new String[]{"a", "b", "c", "d e"}));
    assertTrue(Arrays.equals(IRCParser.tokeniseLine(":a b:c :d e"),
            new String[]{":a", "b:c", "d e"}));
    assertTrue(Arrays.equals(IRCParser.tokeniseLine(null),
            new String[]{""}));
}
@Test
public void testSendConnectionStrings1() throws URISyntaxException {
    // With no password in the URI, registration is exactly NICK then USER.
    final MyInfo info = new MyInfo();
    info.setNickname("Nickname");
    info.setRealname("Real name");
    info.setUsername("Username");
    final TestParser p = new TestParser(info, new URI("irc://irc.testing.dmdirc:6667/"));
    p.sendConnectionStrings();
    assertEquals(2, p.sentLines.size());
    assertTrue("Should send nickname line",
            Arrays.equals(p.getLine(0), new String[]{"NICK", "Nickname"}));
    final String[] userParts = p.getLine(1);
    assertEquals("First token should be USER", "USER", userParts[0]);
    assertEquals("USER should contain username", info.getUsername().toLowerCase(),
            userParts[1].toLowerCase());
    assertEquals("USER should contain server name", "irc.testing.dmdirc", userParts[3]);
    assertEquals("USER should contain real name", "Real name", userParts[4]);
}
@Test
public void testSendConnectionStrings2() throws URISyntaxException {
    // A user-info password in the URI adds a leading PASS line.
    final MyInfo info = new MyInfo();
    info.setNickname("Nickname");
    info.setRealname("Real name");
    info.setUsername("Username");
    final TestParser p = new TestParser(info, new URI("irc://password@irc.testing.dmdirc:6667/"));
    p.sendConnectionStrings();
    assertEquals(3, p.sentLines.size());
    assertTrue("Should send password line",
            Arrays.equals(p.getLine(0), new String[]{"PASS", "password"}));
}
@Test
public void testPingPong() {
    // A server PING must be answered with a PONG carrying the same token.
    final TestParser p = new TestParser();
    p.injectLine("PING :flubadee7291");
    assertTrue("Should reply to PINGs with PONGs",
            Arrays.equals(p.getLine(0), new String[]{"PONG", "flubadee7291"}));
}
@Test
public void testError() throws CallbackNotFoundException {
    // An ERROR line hands its trailing text to the server-error callback.
    final TestIServerError callback = new TestIServerError();
    final TestParser p = new TestParser();
    p.getCallbackManager().addCallback(ServerErrorListener.class, callback);
    p.injectLine("ERROR :You smell of cheese");
    assertNotNull(callback.message);
    assertEquals("ERROR message should be passed to callback",
            "You smell of cheese", callback.message);
}
@Test
public void testAuthNotices() throws CallbackNotFoundException {
    // Both "NOTICE AUTH" lines and pre-001 numeric 020 lines count as
    // auth notices.
    final TestINoticeAuth auth = new TestINoticeAuth();
    final TestParser p = new TestParser();
    p.getCallbackManager().addCallback(AuthNoticeListener.class, auth);
    p.sendConnectionStrings();
    p.injectLine("NOTICE AUTH :Random auth notice?");
    assertNotNull(auth.message);
    assertEquals("Random auth notice?", auth.message);
    auth.message = null;
    p.injectLine(":us.ircnet.org 020 * :Stupid notice");
    assertNotNull(auth.message);
    assertEquals("Stupid notice", auth.message);
}
@Test
public void testPre001NickChange() throws CallbackNotFoundException {
    // A NICK line arriving before 001 must not be mistaken for an auth notice.
    final TestINoticeAuth auth = new TestINoticeAuth();
    final TestParser p = new TestParser();
    p.getCallbackManager().addCallback(AuthNoticeListener.class, auth);
    p.sendConnectionStrings();
    p.injectLine(":chris!@ NICK :user2");
    assertNull(auth.message);
}
@Test
public void testNumeric() throws CallbackNotFoundException {
    // Numeric callbacks receive the parsed numeric and the tokenised line.
    final TestINumeric numeric = new TestINumeric();
    final TestParser p = new TestParser();
    p.getCallbackManager().addCallback(NumericListener.class, numeric);
    p.injectLine(":server 001 nick :Hi there, nick");
    assertEquals(1, numeric.numeric);
    assertTrue(Arrays.equals(new String[]{":server", "001", "nick", "Hi there, nick"},
            numeric.data));
}
@Test
public void testPost005() throws CallbackNotFoundException {
    // Post005 must fire exactly once: after the last 005 line, i.e. on the
    // first non-005 post-registration line — not during registration.
    final TestIPost005 post005 = new TestIPost005();
    final TestParser p = new TestParser();
    p.getCallbackManager().addCallback(Post005Listener.class, post005);
    final String[] registration = {
        "NOTICE AUTH :Blah, blah",
        ":server 020 * :Blah! Blah!",
        ":server 001 nick :Welcome to the Testing IRC Network, nick",
        ":server 002 nick :Your host is server.net, running version foo",
        "NOTICE AUTH :I'm a retarded server",
        ":server 003 nick :This server was created Sun Jan 6 2008 at 17:34:54 CET",
        ":server 004 nick server.net foo dioswkgxRXInP biklmnopstvrDcCNuMT bklov",
        ":server 005 nick WHOX WALLCHOPS WALLVOICES USERIP :are supported by this server",
        ":server 005 nick MAXNICKLEN=15 TOPICLEN=250 AWAYLEN=160 :are supported " +
        "by this server",
        ":server 375 nick :zomg, motd!",
    };
    for (String line : registration) {
        assertFalse("OnPost005 fired too early", post005.done);
        p.injectLine(line);
    }
    assertTrue("OnPost005 not fired", post005.done);
}
@Test
public void test005Parsing() {
    // MAXLIST and NETWORK tokens from 005 must be parsed into per-mode list
    // limits and the network name; unknown modes report a limit of 0.
    final TestParser p = new TestParser();
    final String[] lines = {
        ":server 001 nick :Welcome to the Testing IRC Network, nick",
        ":server 002 nick :Your host is server.net, running version foo",
        ":server 003 nick :This server was created Sun Jan 6 2008 at 17:34:54 CET",
        ":server 004 nick server.net foo dioswkgxRXInP biklmnopstvrDcCNuMT bklov",
        ":server 005 nick WHOX WALLCHOPS WALLVOICES NETWORK=moo :are supported by" +
        " this server",
        ":server 005 nick MAXNICKLEN=15 MAXLIST=b:10,e:22,I:45 :are supported by" +
        " this server",
        ":server 375 nick :zomg, motd!",
    };
    for (String line : lines) {
        p.injectLine(line);
    }
    assertEquals(10, p.getMaxListModes('b'));
    assertEquals(22, p.getMaxListModes('e'));
    assertEquals(45, p.getMaxListModes('I'));
    assertEquals("getMaxListModes should return 0 for unknowns;", 0,
            p.getMaxListModes('z'));
    assertEquals("moo", p.getNetworkName());
    assertEquals("server", p.getServerName());
}
@Test
public void testBindIP() {
    // The bind address is stored and returned verbatim.
    final TestParser p = new TestParser();
    p.setBindIP("abc.def.ghi.123");
    assertEquals("abc.def.ghi.123", p.getBindIP());
}
@Test
public void testCreateFake() {
    // Round-trips the create-fake-client flag through its setter/getter.
    final TestParser parser = new TestParser();
    parser.setCreateFake(false);
    assertFalse(parser.getCreateFake());
    parser.setCreateFake(true);
    assertTrue(parser.getCreateFake());
}
@Test
public void testAutoListMode() {
    // Round-trips the automatic list-mode request flag.
    final TestParser parser = new TestParser();
    parser.setAutoListMode(false);
    assertFalse(parser.getAutoListMode());
    parser.setAutoListMode(true);
    assertTrue(parser.getAutoListMode());
}
@Test
public void testRemoveAfterCallback() {
    // Round-trips the remove-after-callback flag.
    final TestParser parser = new TestParser();
    parser.setRemoveAfterCallback(false);
    assertFalse(parser.getRemoveAfterCallback());
    parser.setRemoveAfterCallback(true);
    assertTrue(parser.getRemoveAfterCallback());
}
@Test
public void testAddLastLine() {
    // Round-trips the add-last-line flag.
    final TestParser parser = new TestParser();
    parser.setAddLastLine(false);
    assertFalse(parser.getAddLastLine());
    parser.setAddLastLine(true);
    assertTrue(parser.getAddLastLine());
}
@Test
public void testDisconnectOnFatal() {
    // Round-trips the disconnect-on-fatal-error flag.
    final TestParser parser = new TestParser();
    parser.setDisconnectOnFatal(false);
    assertFalse(parser.getDisconnectOnFatal());
    parser.setDisconnectOnFatal(true);
    assertTrue(parser.getDisconnectOnFatal());
}
@Test
public void testTrustManager() {
    // The default trust managers are in effect until explicitly replaced.
    final TestParser p = new TestParser();
    assertTrue(Arrays.equals(p.getDefaultTrustManager(), p.getTrustManager()));
    p.setTrustManagers(new TrustManager[0]);
    assertTrue(Arrays.equals(new TrustManager[0], p.getTrustManager()));
}
@Test
public void testPrivateMessages() throws CallbackNotFoundException {
    // One PRIVMSG must fire exactly one of the message/action/CTCP callbacks,
    // depending on its \001 CTCP framing.
    final TestParser parser = new TestParser();
    final TestIPrivateMessage ipmtest = new TestIPrivateMessage();
    final TestIPrivateAction ipatest = new TestIPrivateAction();
    final TestIPrivateCTCP ipctest = new TestIPrivateCTCP();
    parser.injectConnectionStrings();
    parser.getCallbackManager().addCallback(PrivateMessageListener.class, ipmtest);
    parser.getCallbackManager().addCallback(PrivateActionListener.class, ipatest);
    parser.getCallbackManager().addCallback(PrivateCtcpListener.class, ipctest);
    // Plain text: only the message listener fires.
    parser.injectLine(":a!b@c PRIVMSG nick :Hello!");
    assertNotNull(ipmtest.host);
    assertNull(ipatest.host);
    assertNull(ipctest.host);
    assertEquals("a!b@c", ipmtest.host);
    assertEquals("Hello!", ipmtest.message);
    ipmtest.host = null;
    ipmtest.message = null;
    // CTCP ACTION: only the action listener fires.
    parser.injectLine(":a!b@c PRIVMSG nick :" + ((char) 1) + "ACTION meep" + ((char) 1));
    assertNull(ipmtest.host);
    assertNotNull(ipatest.host);
    assertNull(ipctest.host);
    assertEquals("a!b@c", ipatest.host);
    assertEquals("meep", ipatest.message);
    ipatest.host = null;
    ipatest.message = null;
    // Any other CTCP verb: only the generic CTCP listener fires.
    parser.injectLine(":a!b@c PRIVMSG nick :" + ((char) 1) + "FOO meep" + ((char) 1));
    assertNull(ipmtest.host);
    assertNull(ipatest.host);
    assertNotNull(ipctest.host);
    assertEquals("a!b@c", ipctest.host);
    assertEquals("FOO", ipctest.type);
    assertEquals("meep", ipctest.message);
}
private void testListModes(String numeric1, String numeric2, char mode) {
final TestParser parser = new TestParser();
parser.injectConnectionStrings();
parser.injectLine(":nick JOIN
parser.injectLine(":server " + numeric1 + " nick #D ban1!ident@.host bansetter1 1001");
parser.injectLine(":server " + numeric1 + " nick #D ban2!*@.host bansetter2 1002");
parser.injectLine(":server " + numeric1 + " nick #D ban3!ident@* bansetter3 1003");
parser.injectLine(":server " + numeric2 + " nick #D :End of Channel Something List");
final Collection<ChannelListModeItem> items
= parser.getChannel("#D").getListMode(mode);
assertEquals(3, items.size());
boolean gotOne = false, gotTwo = false, gotThree = false;
for (ChannelListModeItem item : items) {
if (item.getItem().equals("ban1!ident@.host")) {
assertEquals("bansetter1", item.getOwner());
assertEquals(1001l, item.getTime());
assertFalse(gotOne);
gotOne = true;
} else if (item.getItem().equals("ban2!*@.host")) {
assertEquals("bansetter2", item.getOwner());
assertEquals(1002l, item.getTime());
assertFalse(gotTwo);
gotTwo = true;
} else if (item.toString().equals("ban3!ident@*")) {
assertEquals("bansetter3", item.getOwner());
assertEquals(1003l, item.getTime());
assertFalse(gotThree);
gotThree = true;
}
}
assertTrue(gotOne);
assertTrue(gotTwo);
assertTrue(gotThree);
}
@Test
public void testNormalBans() {
    // Numerics 367/368: standard ban list (+b).
    testListModes("367", "368", 'b');
}
@Test
public void testInvexList() {
    // Numerics 346/347: invite-exception list (+I).
    testListModes("346", "347", 'I');
}
@Test
public void testExemptList() {
    // Numerics 348/349: ban-exemption list (+e).
    testListModes("348", "349", 'e');
}
@Test
public void testReopList() {
    // Numerics 344/345: reop list (+R).
    testListModes("344", "345", 'R');
}
@Test
public void testGetParam() {
    // getParam() returns the text after the first " :" separator; a line
    // without one is returned unchanged.
    assertEquals("abc def", TestParser.getParam("foo :abc def"));
    assertEquals("bar :abc def", TestParser.getParam("foo :bar :abc def"));
    assertEquals("abc def", TestParser.getParam("abc def"));
}
@Test
public void testKick() throws CallbackNotFoundException {
final TestParser parser = new TestParser();
final ChannelKickListener ick = mock(ChannelKickListener.class);
parser.injectConnectionStrings();
parser.injectLine(":nick JOIN
parser.getCallbackManager().addCallback(ChannelKickListener.class, ick, "
parser.injectLine(":bar!me@moo KICK #D nick :Bye!");
verify(ick).onChannelKick(same(parser), (IRCChannelInfo) anyObject(),
(IRCChannelClientInfo) anyObject(), (IRCChannelClientInfo) anyObject(),
anyString(), anyString());
}
@Test
public void testIRCds() {
    // Version-string to ircd-family detection for common server software.
    doIRCdTest("u2.10.12.10+snircd(1.3.4)", "snircd");
    doIRCdTest("u2.10.12.12", "ircu");
    doIRCdTest("hyperion-1.0.2b", "hyperion");
    doIRCdTest("hybrid-7.2.3", "hybrid");
    doIRCdTest("Unreal3.2.6", "unreal");
    doIRCdTest("bahamut-1.8(04)", "bahamut");
}
@Test
public void testIllegalPort1() throws URISyntaxException {
    // Connecting to port 0 must raise a connect-error event.
    final TestParser tp = new TestParser(new MyInfo(), new URI("irc://127.0.0.1:0/"));
    final TestIConnectError tiei = new TestIConnectError();
    tp.getCallbackManager().addCallback(ConnectErrorListener.class, tiei);
    tp.runSuper();
    assertTrue(tiei.error);
}
@Test
public void testIllegalPort2() throws URISyntaxException {
    // Connecting to port 1 (assumed not to be listening) must raise a
    // connect-error event.
    final TestParser tp = new TestParser(new MyInfo(), new URI("irc://127.0.0.1:1/"));
    final TestIConnectError tiei = new TestIConnectError();
    tp.getCallbackManager().addCallback(ConnectErrorListener.class, tiei);
    tp.runSuper();
    assertTrue(tiei.error);
}
@Test
public void testIllegalPort3() throws URISyntaxException {
    // Port 65570 is outside the valid 16-bit range; expect a connect error.
    final TestParser tp = new TestParser(new MyInfo(), new URI("irc://127.0.0.1:65570/"));
    final TestIConnectError tiei = new TestIConnectError();
    tp.getCallbackManager().addCallback(ConnectErrorListener.class, tiei);
    tp.runSuper();
    assertTrue(tiei.error);
}
/**
 * Replays a minimal 001-004 registration carrying the given version string,
 * then checks both the raw reported software and the detected ircd family.
 */
private void doIRCdTest(final String ircd, final String expected) {
    final TestParser parser = new TestParser();
    final String[] welcome = {
        ":server 001 nick :Welcome to the Testing IRC Network, nick",
        ":server 002 nick :Your host is server.net, running version %s",
        ":server 003 nick :This server was created Sun Jan 6 2008 at 17:34:54 CET",
        ":server 004 nick server.net %s dioswkgxRXInP biklmnopstvrDcCNuMT bklov"
    };
    for (String template : welcome) {
        parser.injectLine(String.format(template, ircd));
    }
    assertEquals(ircd, parser.getServerSoftware());
    assertEquals(expected.toLowerCase(), parser.getServerSoftwareType().toLowerCase());
}
}
|
package com.redhat.ceylon.compiler.typechecker.analyzer;
import static java.lang.Character.toChars;
import static java.lang.Integer.parseInt;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.antlr.runtime.Token;
import com.redhat.ceylon.compiler.typechecker.parser.CeylonLexer;
import com.redhat.ceylon.compiler.typechecker.tree.Node;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.CharLiteral;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.FloatLiteral;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.Literal;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.NaturalLiteral;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.QuotedLiteral;
import com.redhat.ceylon.compiler.typechecker.tree.Tree.StringLiteral;
import com.redhat.ceylon.compiler.typechecker.tree.Visitor;
public class LiteralVisitor extends Visitor {
@Override
public void visit(StringLiteral that) {
StringBuilder result = new StringBuilder();
stripIndent(that.getText(), getIndentPosition(that), result);
interpolateEscapes(result, that);
int type = that.getToken().getType();
if (type==CeylonLexer.STRING_END ||
type==CeylonLexer.STRING_MID) {
result.deleteCharAt(0);
}
if (type==CeylonLexer.STRING_START ||
type==CeylonLexer.STRING_MID) {
result.deleteCharAt(result.length()-1);
}
that.setText(result.toString());
}
@Override
public void visit(QuotedLiteral that) {
StringBuilder result = new StringBuilder();
stripIndent(that.getText(), getIndentPosition(that), result);
//interpolateEscapes(result, that);
that.setText(result.toString());
}
private int getIndentPosition(Literal that) {
Token token = that.getToken();
return token==null ? 0 : token.getCharPositionInLine()+1;
}
@Override
public void visit(CharLiteral that) {
StringBuilder result = new StringBuilder(that.getText());
interpolateEscapes(result, that);
that.setText(result.toString());
}
@Override
public void visit(FloatLiteral that) {
that.setText(that.getText()
.replace("_", "")
.replace("k", "e+3")
.replace("M", "e+6")
.replace("G", "e+9")
.replace("T", "e+12")
.replace("P", "e+15")
.replace("m", "e-3")
.replace("u", "e-6")
.replace("n", "e-9")
.replace("p", "e-12")
.replace("f", "e-15"));
}
@Override
public void visit(NaturalLiteral that) {
that.setText(that.getText()
.replace("_", "")
.replace("k", "000")
.replace("M", "000000")
.replace("G", "000000000")
.replace("T", "000000000000")
.replace("P", "000000000000000"));
}
private static void stripIndent(final String text, final int start,
final StringBuilder result) {
int num = 0;
for (String line: text.split("\n|\r\n?")) {
if (num++==0 || line.length()<start) {
result.append(line);
}
else {
boolean trimIndent = true;
for (int i=0; i<start; i++) {
if (line.charAt(i)!=' ') {
trimIndent = false;
break;
}
}
if (trimIndent) {
result.append(line.substring(start));
}
else {
result.append(line);
}
}
result.append("\n");
}
result.setLength(result.length()-1);
}
private static Pattern re = Pattern.compile("\\\\(\\{
private static void interpolateEscapes(final StringBuilder result, Node node) {
Matcher m;
int start=0;
while ((m = re.matcher(result)).find(start)) {
String hex = m.group(2);
if (hex!=null) {
if (hex.length()!=2 && hex.length()!=4 && hex.length()!=8) {
node.addError("illegal unicode escape sequence: must consist of 2, 4 or 8 digits");
}
else {
int codePoint=0;
try {
codePoint = parseInt(hex, 16);
}
catch (NumberFormatException nfe) {
node.addError("illegal unicode escape sequence: '" +
hex + "' is not a hexadecimal number");
}
result.replace(m.start(), m.end(), new String(toChars(codePoint)));
}
}
else {
char escape = m.group(3).charAt(0);
char ch;
switch (escape) {
case 'b': ch = '\b'; break;
case 't': ch = '\t'; break;
case 'n': ch = '\n'; break;
case 'f': ch = '\f'; break;
case 'r': ch = '\r'; break;
case '"':
case '\'':
case '`':
case '\\':
ch = escape; break;
default:
node.addError("illegal escape sequence: \\" + escape);
ch='?';
}
result.replace(m.start(), m.end(), Character.toString(ch));
}
start = m.start()+1;
}
}
}
|
package edu.mines.alterego;
// Plain model object for a character row loaded from the database.
class CharacterData {
    // NOTE(review): the constructor stores its gameId argument here, so this
    // field appears to hold the owning game's id rather than a character id —
    // confirm which id is intended.
    public int id;
    public String name;
    public String description;
    /**
     * <p>
     * Creates a model object for a Character. Each character must have a name
     * and description, and a game that the character belongs to. These values
     * should come directly from the database.
     * </p>
     *
     * @param gameId ID to the game to belong to
     * @param name Name of the character
     * @param description Description of the character. Could be physical, behavioral, or even a full backstory
     */
    CharacterData(int gameId, String name, String description) {
        id = gameId;
        this.name = name;
        this.description = description;
    }
    // Human-readable "name: description" form used wherever the character is listed.
    @Override
    public String toString() {
        return name + ": " + description;
    }
}
|
package org.rstudio.studio.client.rsconnect.ui;
import java.util.ArrayList;
import org.rstudio.core.client.JsArrayUtil;
import org.rstudio.core.client.StringUtil;
import org.rstudio.core.client.files.FileSystemItem;
import org.rstudio.core.client.widget.Operation;
import org.rstudio.core.client.widget.OperationWithInput;
import org.rstudio.core.client.widget.ProgressIndicator;
import org.rstudio.core.client.widget.ProgressOperationWithInput;
import org.rstudio.core.client.widget.ThemedButton;
import org.rstudio.studio.client.RStudioGinjector;
import org.rstudio.studio.client.common.FileDialogs;
import org.rstudio.studio.client.common.FilePathUtils;
import org.rstudio.studio.client.common.GlobalDisplay;
import org.rstudio.studio.client.rsconnect.RSConnect;
import org.rstudio.studio.client.rsconnect.model.RSConnectAccount;
import org.rstudio.studio.client.rsconnect.model.RSConnectApplicationInfo;
import org.rstudio.studio.client.rsconnect.model.RSConnectDeploymentFiles;
import org.rstudio.studio.client.rsconnect.model.RSConnectDeploymentRecord;
import org.rstudio.studio.client.rsconnect.model.RSConnectPublishResult;
import org.rstudio.studio.client.rsconnect.model.RSConnectPublishSettings;
import org.rstudio.studio.client.rsconnect.model.RSConnectPublishSource;
import org.rstudio.studio.client.rsconnect.model.RSConnectServerOperations;
import org.rstudio.studio.client.server.ServerError;
import org.rstudio.studio.client.server.ServerRequestCallback;
import org.rstudio.studio.client.workbench.prefs.model.UIPrefs;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.dom.client.Style.FontWeight;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.resources.client.ClientBundle;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.ui.Anchor;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.HTMLPanel;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
public class RSConnectDeploy extends Composite
{
private static RSConnectDeployUiBinder uiBinder = GWT
.create(RSConnectDeployUiBinder.class);
// UiBinder contract pairing this widget with its .ui.xml template.
interface RSConnectDeployUiBinder extends UiBinder<Widget, RSConnectDeploy>
{
}
// CSS class-name accessors backing RSConnectDeploy.css; one method per
// obfuscated class name.
public interface DeployStyle extends CssResource
{
   String accountAnchor();
   String accountList();
   String controlLabel();
   String deployLabel();
   String descriptionPanel();
   String dropListControl();
   String fileList();
   String firstControlLabel();
   String gridControl();
   String launchCheck();
   String normalStatus();
   String otherStatus();
   String rootCell();
   String source();
   String sourceDestLabels();
   String statusLabel();
   String transferArrow();
   String urlAnchor();
   String wizard();
   String progressPanel();
   String appDetailsPanel();
}
// Bundled images (one illustration per publishable content type) plus the
// pane's stylesheet.
public interface DeployResources extends ClientBundle
{
   ImageResource publishShinyIllustration();
   ImageResource publishRmdIllustration();
   ImageResource publishPlotIllustration();
   ImageResource publishPresentationIllustration();
   ImageResource publishHTMLIllustration();
   @Source("RSConnectDeploy.css")
   DeployStyle style();
}
public static DeployResources RESOURCES = GWT.create(DeployResources.class);
/**
 * Creates the deployment pane.
 *
 * @param source       the content being published; may be null when hosted in
 *                     a wizard before a source has been chosen
 * @param contentType  content-type constant (presumably one of RSConnect's
 *                     content-type values — confirm against RSConnect)
 * @param fromPrevious record of a previous deployment to re-deploy, or null
 *                     for a first-time publish
 * @param asWizard     true when this pane is embedded in the publish wizard
 */
public RSConnectDeploy(RSConnectPublishSource source,
                       int contentType,
                       RSConnectDeploymentRecord fromPrevious,
                       boolean asWizard)
{
   if (source != null)
   {
      forDocument_ = source.isDocument();
   }
   else
   {
      forDocument_ = asWizard;
   }
   // Fix: the source parameter was never stored, so the source_ reads below
   // (the preview handler and the applySource() check at the end of this
   // constructor) always saw null and applySource() could never run.
   source_ = source;
   contentType_ = contentType;
   fromPrevious_ = fromPrevious;
   // import static/code and single/multiple settings from previous
   // deployment, if we have one
   if (fromPrevious != null)
   {
      asMultipleRmd_ = fromPrevious.getAsMultiple();
      asStatic_ = fromPrevious.getAsStatic();
   }
   // inject dependencies
   RStudioGinjector.INSTANCE.injectMembers(this);
   // create UI
   initWidget(uiBinder.createAndBindUi(this));
   style_ = RESOURCES.style();
   if (asWizard)
   {
      deployIllustration_.setVisible(false);
      rootPanel_.addStyleName(style_.wizard());
   }
   // Invoke the "add account" wizard
   addAccountAnchor_.addClickHandler(new ClickHandler()
   {
      @Override
      public void onClick(ClickEvent event)
      {
         connector_.showAccountWizard(false, new OperationWithInput<Boolean>()
         {
            @Override
            public void execute(Boolean successful)
            {
               if (successful)
               {
                  accountList_.refreshAccountList();
               }
            }
         });
         event.preventDefault();
         event.stopPropagation();
      }
   });
   // Extra files can only be attached when deploying a document.
   addFileButton_.setVisible(forDocument_);
   addFileButton_.getElement().getStyle().setMarginLeft(0, Unit.PX);
   addFileButton_.addClickHandler(new ClickHandler()
   {
      @Override
      public void onClick(ClickEvent arg0)
      {
         onAddFileClick();
      }
   });
   previewButton_.getElement().getStyle().setMarginLeft(0, Unit.PX);
   previewButton_.addClickHandler(new ClickHandler()
   {
      @Override
      public void onClick(ClickEvent arg0)
      {
         if (display_ != null && !StringUtil.isNullOrEmpty(
               source_.getDeployFile()))
         {
            display_.showHtmlFile(source_.getDeployFile());
         }
      }
   });
   // If we're loading a previous deployment, hide new app name fields
   if (fromPrevious_ != null)
   {
      newAppPanel_.setVisible(false);
   }
   // If we already know the source, apply it
   if (source_ != null)
   {
      applySource();
   }
}
/**
 * Gin-injected initialization; wires server, account connector, display,
 * and prefs, then builds the account list and its change handlers.
 */
@Inject
public void initialize(RSConnectServerOperations server,
RSAccountConnector connector,
GlobalDisplay display,
UIPrefs prefs)
{
server_ = server;
connector_ = connector;
display_ = display;
prefs_ = prefs;
// cloud accounts are hidden when publishing static content
accountList_ = new RSConnectAccountList(server_, display_, false,
!asStatic_);
// when the account list finishes populating, select the account from the
// previous deployment if we have one
accountList_.setOnRefreshCompleted(new Operation() {
@Override
public void execute()
{
if (fromPrevious_ != null)
{
// when re-deploying, select the account used the last time
// around
accountList_.selectAccount(fromPrevious_.getAccount());
}
else
{
// when doing a first-time publish, select the account the user
// prefers (currently this just tracks the last account used)
RSConnectAccount preferred =
prefs_.preferredPublishAccount().getGlobalValue();
if (preferred != null)
{
accountList_.selectAccount(preferred);
}
}
}
});
// when the user selects a different account, show the appropriate UI
addAccountChangeHandler(new ChangeHandler()
{
@Override
public void onChange(ChangeEvent arg0)
{
if (fromPrevious_ != null)
{
// re-selecting the original account means updating the existing
// app; any other account means creating a new one
boolean existing = accountList_.getSelectedAccount().equals(
fromPrevious_.getAccount());
appInfoPanel_.setVisible(existing);
newAppPanel_.setVisible(!existing);
// validate name if necessary
if (existing && onDeployEnabled_ != null)
onDeployEnabled_.execute();
else if (!existing)
appName_.validateAppName();
}
}
});
}
// Shows the (abbreviated) source directory in the deploy label.
public void setSourceDir(String dir)
{
// shorten long paths so they fit in the 250px label
dir = StringUtil.shortPathName(FileSystemItem.createDir(dir), 250);
deployLabel_.setText(dir);
}
// Pre-populates the new-application name textbox.
public void setNewAppName(String name)
{
appName_.setText(name);
}
// Selects the given account in the account list.
public void setDefaultAccount(RSConnectAccount account)
{
accountList_.selectAccount(account);
}
// Replaces the contents of the account list.
public void setAccountList(JsArray<RSConnectAccount> accounts)
{
accountList_.setAccountList(accounts);
}
// Adds a (checked) file entry to the deployment file list.
public void addFileToList(String path)
{
addFile(path, true);
}
// Returns the files currently checked for deployment.
public ArrayList<String> getFileList()
{
return getCheckedFileList(true);
}
// Returns the files currently unchecked (excluded from deployment).
public ArrayList<String> getIgnoredFileList()
{
return getCheckedFileList(false);
}
// Returns the app name the user has typed.
public String getNewAppName()
{
return appName_.getText();
}
/**
 * Shows details for an existing deployed app (or just the panels, when
 * info is null) and hides the new-app fields; fires the deploy-enabled
 * callback since an existing app needs no name validation.
 */
public void showAppInfo(RSConnectApplicationInfo info)
{
if (info != null)
{
urlAnchor_.setText(info.getUrl());
urlAnchor_.setHref(info.getUrl());
}
appInfoPanel_.setVisible(true);
appDetailsPanel_.setVisible(true);
newAppPanel_.setVisible(false);
if (onDeployEnabled_ != null)
onDeployEnabled_.execute();
}
// Registers a handler invoked when the selected account changes.
public HandlerRegistration addAccountChangeHandler(ChangeHandler handler)
{
return accountList_.addChangeHandler(handler);
}
// Command to run when the deploy button should become enabled
// (also fired when the app name validates successfully).
public void setOnDeployEnabled(Command cmd)
{
appName_.setOnNameIsValid(cmd);
onDeployEnabled_ = cmd;
}
// Command to run when the deploy button should become disabled.
public void setOnDeployDisabled(Command cmd)
{
appName_.setOnNameIsInvalid(cmd);
}
// Exposes the injected CSS style accessor.
public DeployStyle getStyle()
{
return style_;
}
// Called when the pane becomes active; loads accounts and deployment files.
public void onActivate(ProgressIndicator indicator)
{
populateAccountList(indicator, false);
populateDeploymentFiles(indicator);
}
/**
 * Sets (or replaces) the content to be published. Refreshes the account
 * list when the static/code flavor changes, since not all destination
 * accounts support static content.
 */
public void setPublishSource(RSConnectPublishSource source,
int contentType, boolean asMultipleRmd, boolean asStatic)
{
source_ = source;
contentType_ = contentType;
asMultipleRmd_ = asMultipleRmd;
// not all destination accounts support static content
if (asStatic_ != asStatic)
{
accountList_.setShowCloudAccounts(!asStatic);
accountList_.refreshAccountList();
}
asStatic_ = asStatic;
applySource();
}
// Moves keyboard focus to the app name textbox.
public void focus()
{
appName_.setFocus(true);
}
/**
 * Collects the user's choices into a publish result: target app name,
 * account, source, and the file/settings bundle. Also records the chosen
 * account as the preferred one for future first-time publishes.
 */
public RSConnectPublishResult getResult()
{
// compose the list of files that have been manually added; we want to
// include all the ones the user added but didn't later uncheck, so
// cross-reference the list we kept with the one returned by the dialog
ArrayList<String> deployFiles = getFileList();
ArrayList<String> additionalFiles = new ArrayList<String>();
for (String filePath: filesAddedManually_)
{
if (deployFiles.contains(filePath))
{
additionalFiles.add(filePath);
}
}
// if we're redeploying to the same account, use the previous app name;
// otherwise, read the new name the user's entered
String appName = fromPrevious_ != null &&
getSelectedAccount().equals(fromPrevious_.getAccount()) ?
fromPrevious_.getName() : getNewAppName();
// if this was new content, set this account as the default to use for
// new content
if (fromPrevious_ == null &&
!getSelectedAccount().equals(
prefs_.preferredPublishAccount().getGlobalValue()))
{
prefs_.preferredPublishAccount().setGlobalValue(getSelectedAccount());
prefs_.writeUIPrefs();
}
return new RSConnectPublishResult(
appName,
getSelectedAccount(),
source_,
new RSConnectPublishSettings(deployFiles,
additionalFiles,
getIgnoredFileList(),
asMultipleRmd_,
asStatic_));
}
// True when the entered app name passes validation.
public boolean isResultValid()
{
return appName_.validateAppName();
}
/**
 * Populates the file list UI. Files present in ignoredFiles start
 * unchecked; files present in additionalFiles are skipped in the main pass
 * (to avoid duplicates) and appended, checked, at the end. Both extra
 * lists may be null.
 */
private void setFileList(ArrayList<String> files,
ArrayList<String> additionalFiles, ArrayList<String> ignoredFiles)
{
if (forDocument_)
{
// documents get checkboxes; start a fresh tracking list
fileChecks_ = new ArrayList<CheckBox>();
}
// clear existing file list
fileListPanel_.clear();
for (int i = 0; i < files.size(); i++)
{
String file = files.get(i);
// if this file is marked ignored, uncheck it (List.contains uses
// String.equals, matching the original hand-rolled scan)
boolean checked = ignoredFiles == null || !ignoredFiles.contains(file);
// if this file is marked additional, don't add it twice (we're about
// to add the additional files separately below)
boolean add = additionalFiles == null || !additionalFiles.contains(file);
if (add)
{
addFile(file, checked);
}
}
// add any additional files
if (additionalFiles != null)
{
for (int i = 0; i < additionalFiles.size(); i++)
{
addFile(additionalFiles.get(i), true);
}
}
}
// Convenience accessor for the account selected in the list.
private RSConnectAccount getSelectedAccount()
{
return accountList_.getSelectedAccount();
}
/**
 * When servicing a redeploy, looks up the previously deployed app on the
 * server and shows its details. Failures here are non-fatal since the
 * display is purely informative.
 */
private void setPreviousInfo()
{
// when the dialog is servicing a redeploy, find information on the
// content as currently deployed
if (fromPrevious_ != null)
{
appProgressName_.setText(fromPrevious_.getName());
appExistingName_.setText(fromPrevious_.getName());
appProgressPanel_.setVisible(true);
appInfoPanel_.setVisible(true);
// get all of the apps deployed from the account to the server
server_.getRSConnectAppList(
fromPrevious_.getAccountName(),
fromPrevious_.getServer(),
new ServerRequestCallback<JsArray<RSConnectApplicationInfo>>()
{
@Override
public void onResponseReceived(
JsArray<RSConnectApplicationInfo> infos)
{
// hide server progress
appProgressPanel_.setVisible(false);
// find an app with the same account, server, and name;
// when found, populate the UI with app details
for (int i = 0; i < infos.length(); i++)
{
RSConnectApplicationInfo info = infos.get(i);
// FIX: compare names with equals() rather than ==; reference
// comparison on Strings is incorrect in Java semantics
if (info.getName().equals(fromPrevious_.getName()))
{
showAppInfo(info);
break;
}
}
}
@Override
public void onError(ServerError error)
{
// it's okay if we fail here, since the application info
// display is purely informative
appProgressPanel_.setVisible(false);
showAppInfo(null);
}
});
}
}
/**
 * Fetches the connected accounts. If none exist on the first try, launches
 * the account wizard and retries once; on the retry, proceeds with
 * whatever accounts exist.
 */
private void populateAccountList(final ProgressIndicator indicator,
final boolean isRetry)
{
server_.getRSConnectAccountList(
new ServerRequestCallback<JsArray<RSConnectAccount>>()
{
@Override
public void onResponseReceived(JsArray<RSConnectAccount> accounts)
{
// if this is our first try, ask the user to connect an account
// since none are currently connected
if (accounts.length() == 0 && !isRetry)
{
connector_.showAccountWizard(true,
new OperationWithInput<Boolean>()
{
@Override
public void execute(Boolean input)
{
// retry exactly once, regardless of wizard outcome
populateAccountList(indicator, true);
}
});
}
else
{
setAccountList(accounts);
setPreviousInfo();
}
}
@Override
public void onError(ServerError error)
{
indicator.onError("Error retrieving accounts:\n\n" +
error.getMessage());
indicator.onCompleted();
}
});
}
/**
 * Determines the set of files to deploy. Self-contained documents are
 * injected directly; otherwise the server is asked to enumerate the
 * deployment directory, with size and emptiness checks.
 */
private void populateDeploymentFiles(final ProgressIndicator indicator)
{
if (source_ == null)
return;
// if this is a self-contained document, we don't need to scrape it for
// dependencies; just inject it directly into the list.
if (source_.isSelfContained())
{
ArrayList<String> files = new ArrayList<String>();
FileSystemItem selfContained = FileSystemItem.createFile(
source_.getDeployFile());
files.add(selfContained.getName());
setFileList(files, null, null);
setPrimaryFile(selfContained.getName());
return;
}
// read the parent directory if we're "deploying" a .R file
final String fileSource = source_.isDocument() ?
source_.getDeployFile() : source_.getDeployDir();
indicator.onProgress("Collecting files...");
server_.getDeploymentFiles(
fileSource,
asMultipleRmd_,
new ServerRequestCallback<RSConnectDeploymentFiles>()
{
@Override
public void onResponseReceived(RSConnectDeploymentFiles files)
{
if (files.getDirSize() > files.getMaxSize())
{
indicator.onError(
"The item to be deployed (" + fileSource + ") " +
"exceeds the maximum deployment size, which is " +
StringUtil.formatFileSize(files.getMaxSize()) + "." +
" Consider creating a new directory containing " +
"only the content you wish to deploy.");
}
else
{
if (files.getDirList() == null ||
files.getDirList().length() == 0)
{
indicator.onError("Could not determine the list of " +
"files to deploy.");
indicator.onCompleted();
// FIX: the original fell through here and called
// setFileList on a null/empty dir list after reporting
// the error; bail out instead.
return;
}
setFileList(
JsArrayUtil.fromJsArrayString(files.getDirList()),
fromPrevious_ != null ?
fromPrevious_.getAdditionalFiles() : null,
fromPrevious_ != null ?
fromPrevious_.getIgnoredFiles() : null);
setPrimaryFile(
FileSystemItem.createFile(
source_.getDeployFile()).getName());
}
indicator.clearProgress();
}
@Override
public void onError(ServerError error)
{
// we need to have a list of files to deploy to proceed
indicator.onError("Could not find files to deploy: \n\n" +
error.getMessage());
indicator.onCompleted();
}
});
}
/**
 * Adds one file entry to the file list panel: a tracked checkbox for
 * document deployments, a plain read-only label otherwise.
 */
private void addFile(String path, boolean checked)
{
if (forDocument_)
{
CheckBox fileCheck = new CheckBox(path);
fileCheck.setValue(checked);
fileListPanel_.add(fileCheck);
fileChecks_.add(fileCheck);
}
else
{
fileListPanel_.add(new Label(path));
}
}
/**
 * Returns the text of every file checkbox whose checked state matches the
 * requested value; returns an empty list when no checkboxes exist (i.e.
 * this is not a document deployment).
 */
private ArrayList<String> getCheckedFileList(boolean checked)
{
ArrayList<String> result = new ArrayList<String>();
if (fileChecks_ == null)
return result;
for (CheckBox fileCheck : fileChecks_)
{
if (fileCheck.getValue() == checked)
{
result.add(fileCheck.getText());
}
}
return result;
}
/**
 * Handles the "add file" button: lets the user pick a file under the
 * deployment directory and adds it (once) to the file list. Files outside
 * the deployment directory are rejected with a message.
 */
private void onAddFileClick()
{
FileDialogs dialogs = RStudioGinjector.INSTANCE.getFileDialogs();
final FileSystemItem sourceDir =
FileSystemItem.createDir(source_.getDeployDir());
dialogs.openFile("Select File",
RStudioGinjector.INSTANCE.getRemoteFileSystemContext(),
sourceDir,
new ProgressOperationWithInput<FileSystemItem>()
{
@Override
public void execute(FileSystemItem input,
ProgressIndicator indicator)
{
if (input != null)
{
// a null relative path means the file is outside sourceDir
String path = input.getPathRelativeTo(sourceDir);
if (path == null)
{
display_.showMessage(GlobalDisplay.MSG_INFO,
"Cannot Add File",
"Only files in the same folder as the " +
"document (" + sourceDir + ") or one of its " +
"sub-folders may be added.");
return;
}
else
{
// see if the file is already in the list (we don't
// want to duplicate an existing entry)
ArrayList<String> files = getFileList();
for (String file: files)
{
if (file.equals(path))
{
indicator.onCompleted();
return;
}
}
addFileToList(path);
// remember manual additions for getResult()
filesAddedManually_.add(path);
}
}
indicator.onCompleted();
}
});
}
/**
 * Marks the checkbox matching the given path as the primary deployment
 * file: locked checked, shown bold, and moved to the top of the list.
 * No-op when there are no checkboxes.
 */
private void setPrimaryFile(String path)
{
if (fileChecks_ == null)
return;
for (CheckBox fileCheck : fileChecks_)
{
if (!fileCheck.getText().equals(path))
continue;
// don't allow the user to unselect the primary file
fileCheck.setEnabled(false);
// make this bold and move it to the top
fileCheck.getElement().getStyle().setFontWeight(FontWeight.BOLD);
fileListPanel_.remove(fileCheck);
fileListPanel_.insert(fileCheck, 0);
}
}
/**
 * Updates the UI to match the current publish source: shows either the
 * file list or a content-type description, derives a default app name for
 * apps and documents, and picks the matching illustration.
 */
private void applySource()
{
// If this is a self-contained file, don't show the file list; instead,
// show the description of what we're about to publish
if (source_.isSelfContained())
{
filePanel_.setVisible(false);
descriptionPanel_.setVisible(true);
if (contentType_ == RSConnect.CONTENT_TYPE_PLOT ||
contentType_ == RSConnect.CONTENT_TYPE_HTML)
{
descriptionImage_.setResource(
RSConnectResources.INSTANCE.previewPlot());
}
else if (contentType_ == RSConnect.CONTENT_TYPE_PRES)
{
descriptionImage_.setResource(
RSConnectResources.INSTANCE.previewPresentation());
}
else
{
descriptionImage_.setResource(
RSConnectResources.INSTANCE.previewDoc());
}
}
// if the app name textbox isn't populated, derive from the filename
// (for apps and documents--other content types use temporary filenames)
// FIX: parenthesized the || — the original precedence ("isEmpty && APP
// || DOCUMENT") overwrote a user-entered name whenever the content was
// a document, contradicting the comment above.
if (appName_.getText().isEmpty() &&
(contentType_ == RSConnect.CONTENT_TYPE_APP ||
contentType_ == RSConnect.CONTENT_TYPE_DOCUMENT))
{
appName_.setText(FilePathUtils.fileNameSansExtension(
source_.getSourceFile()));
}
// pick the illustration matching the content type, if any
ImageResource illustration = null;
if (contentType_ == RSConnect.CONTENT_TYPE_APP)
illustration = RESOURCES.publishShinyIllustration();
else if (contentType_ == RSConnect.CONTENT_TYPE_PLOT)
illustration = RESOURCES.publishPlotIllustration();
else if (contentType_ == RSConnect.CONTENT_TYPE_DOCUMENT)
illustration = RESOURCES.publishRmdIllustration();
else if (contentType_ == RSConnect.CONTENT_TYPE_HTML)
illustration = RESOURCES.publishHTMLIllustration();
else if (contentType_ == RSConnect.CONTENT_TYPE_PRES)
illustration = RESOURCES.publishPresentationIllustration();
if (illustration != null)
deployIllustration_.setResource(illustration);
}
// --- widgets bound by uiBinder (RSConnectDeploy.ui.xml) ---
@UiField Anchor addAccountAnchor_;
@UiField Anchor urlAnchor_;
@UiField AppNameTextbox appName_;
@UiField Grid mainGrid_;
@UiField HTMLPanel appDetailsPanel_;
@UiField HTMLPanel appInfoPanel_;
@UiField HTMLPanel appProgressPanel_;
@UiField HTMLPanel newAppPanel_;
@UiField HTMLPanel rootPanel_;
@UiField Image deployIllustration_;
@UiField Image descriptionImage_;
@UiField InlineLabel deployLabel_;
@UiField Label appExistingName_;
@UiField Label appProgressName_;
@UiField Label nameLabel_;
@UiField ThemedButton addFileButton_;
@UiField ThemedButton previewButton_;
@UiField VerticalPanel fileListPanel_;
@UiField VerticalPanel filePanel_;
@UiField VerticalPanel descriptionPanel_;
// constructed manually in initialize() before UI binding
@UiField(provided=true) RSConnectAccountList accountList_;
// --- mutable state ---
// checkboxes in the file list (document deployments only; else null)
private ArrayList<CheckBox> fileChecks_;
// files the user added by hand, cross-referenced in getResult()
private ArrayList<String> filesAddedManually_ =
new ArrayList<String>();
// --- injected collaborators (see initialize()) ---
private RSConnectServerOperations server_;
private GlobalDisplay display_;
private RSAccountConnector connector_;
private UIPrefs prefs_;
// --- publish state ---
private RSConnectPublishSource source_;
private boolean asMultipleRmd_;
private boolean asStatic_;
private int contentType_;
private Command onDeployEnabled_;
private final DeployStyle style_;
private final boolean forDocument_;
private final RSConnectDeploymentRecord fromPrevious_;
}
|
package org.postgresql.test.jdbc2;
import org.postgresql.test.JDBC2Tests;
import junit.framework.TestCase;
import java.sql.*;
/*
* TestCase to test the internal functionality of org.postgresql.jdbc2.DatabaseMetaData
*
* PS: Do you know how difficult it is to type on a train? ;-)
*
* $Id: DatabaseMetaDataTest.java,v 1.5 2002/04/16 15:25:17 davec Exp $
*/
// JUnit 3 test suite exercising org.postgresql.jdbc2's DatabaseMetaData
// implementation against a live database opened via JDBC2Tests.
public class DatabaseMetaDataTest extends TestCase
{
// connection shared by all tests; opened in setUp, closed in tearDown
private Connection con;
/*
* Constructor
*/
public DatabaseMetaDataTest(String name)
{
super(name);
}
// Opens the test database and creates the table the metadata tests probe.
protected void setUp() throws Exception
{
con = JDBC2Tests.openDB();
JDBC2Tests.createTable( con, "testmetadata", "id int4, name text, updated timestamp" );
}
// Drops the test table and closes the connection.
protected void tearDown() throws Exception
{
JDBC2Tests.dropTable( con, "testmetadata" );
JDBC2Tests.closeDB( con );
}
/*
* The spec says this may return null, but we always do!
*/
// Verifies getTables/getColumns report the table and column types created
// in setUp (int4 -> INTEGER, text -> VARCHAR, timestamp -> TIMESTAMP).
public void testGetMetaData()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
ResultSet rs = dbmd.getTables( null, null, "test%", new String[] {"TABLE"});
assertTrue( rs.next() );
assertTrue( rs.getString("TABLE_NAME").equals("testmetadata") );
rs.close();
rs = dbmd.getColumns("", "", "test%", "%" );
assertTrue( rs.next() );
assertTrue( rs.getString("TABLE_NAME").equals("testmetadata") );
assertTrue( rs.getString("COLUMN_NAME").equals("id") );
assertTrue( rs.getInt("DATA_TYPE") == java.sql.Types.INTEGER );
assertTrue( rs.next() );
assertTrue( rs.getString("TABLE_NAME").equals("testmetadata") );
assertTrue( rs.getString("COLUMN_NAME").equals("name") );
assertTrue( rs.getInt("DATA_TYPE") == java.sql.Types.VARCHAR );
assertTrue( rs.next() );
assertTrue( rs.getString("TABLE_NAME").equals("testmetadata") );
assertTrue( rs.getString("COLUMN_NAME").equals("updated") );
assertTrue( rs.getInt("DATA_TYPE") == java.sql.Types.TIMESTAMP );
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
/*
* Test default capabilities
*/
// Pins the driver's advertised SQL-grammar and transaction capabilities.
public void testCapabilities()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(dbmd.allProceduresAreCallable());
assertTrue(dbmd.allTablesAreSelectable()); // not true all the time
// This should always be false for postgresql (at least for 7.x)
assertTrue(!dbmd.isReadOnly());
// does the backend support this yet? The protocol does...
assertTrue(!dbmd.supportsMultipleResultSets());
// yes, as multiple backends can have transactions open
assertTrue(dbmd.supportsMultipleTransactions());
assertTrue(dbmd.supportsMinimumSQLGrammar());
assertTrue(!dbmd.supportsCoreSQLGrammar());
assertTrue(!dbmd.supportsExtendedSQLGrammar());
assertTrue(!dbmd.supportsANSI92EntryLevelSQL());
assertTrue(!dbmd.supportsANSI92IntermediateSQL());
assertTrue(!dbmd.supportsANSI92FullSQL());
assertTrue(!dbmd.supportsIntegrityEnhancementFacility());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// Outer-join support should be fully advertised.
public void testJoins()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(dbmd.supportsOuterJoins());
assertTrue(dbmd.supportsFullOuterJoins());
assertTrue(dbmd.supportsLimitedOuterJoins());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// Positioned updates/deletes are not supported by the driver.
public void testCursors()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(!dbmd.supportsPositionedDelete());
assertTrue(!dbmd.supportsPositionedUpdate());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// NULL sort order depends on the backend version (7.2 changed it), so the
// expectations are phrased relative to haveMinimumServerVersion.
public void testNulls()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
// We need to type cast the connection to get access to the
// PostgreSQL-specific method haveMinimumServerVersion().
// This is not available through the java.sql.Connection interface.
assertTrue( con instanceof org.postgresql.Connection );
assertTrue(!dbmd.nullsAreSortedAtStart());
assertTrue( dbmd.nullsAreSortedAtEnd() !=
((org.postgresql.Connection)con).haveMinimumServerVersion("7.2"));
assertTrue( dbmd.nullsAreSortedHigh() ==
((org.postgresql.Connection)con).haveMinimumServerVersion("7.2"));
assertTrue(!dbmd.nullsAreSortedLow());
assertTrue(dbmd.nullPlusNonNullIsNull());
assertTrue(dbmd.supportsNonNullableColumns());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// The server, not the client, owns table storage.
public void testLocalFiles()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(!dbmd.usesLocalFilePerTable());
assertTrue(!dbmd.usesLocalFiles());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// Identifier case handling: unquoted identifiers fold to lower case,
// quoted identifiers preserve case; the quote character is ".
public void testIdentifiers()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(!dbmd.supportsMixedCaseIdentifiers()); // always false
assertTrue(dbmd.supportsMixedCaseQuotedIdentifiers()); // always true
assertTrue(!dbmd.storesUpperCaseIdentifiers()); // always false
assertTrue(dbmd.storesLowerCaseIdentifiers()); // always true
assertTrue(!dbmd.storesUpperCaseQuotedIdentifiers()); // always false
assertTrue(!dbmd.storesLowerCaseQuotedIdentifiers()); // always false
assertTrue(!dbmd.storesMixedCaseQuotedIdentifiers()); // always false
assertTrue(dbmd.getIdentifierQuoteString().equals("\""));
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// ALTER TABLE capabilities: add column yes, drop column no.
public void testTables()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
// we can add columns
assertTrue(dbmd.supportsAlterTableWithAddColumn());
// we can't drop columns (yet)
assertTrue(!dbmd.supportsAlterTableWithDropColumn());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// SELECT-clause features: aliasing, expressions/unrelated columns in
// ORDER BY, and the GROUP BY variants.
public void testSelect()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
// yes we can?: SELECT col a FROM a;
assertTrue(dbmd.supportsColumnAliasing());
// yes we can have expressions in ORDERBY
assertTrue(dbmd.supportsExpressionsInOrderBy());
// Yes, an ORDER BY clause can contain columns that are not in the
// SELECT clause.
assertTrue(dbmd.supportsOrderByUnrelated());
assertTrue(dbmd.supportsGroupBy());
assertTrue(dbmd.supportsGroupByUnrelated());
assertTrue(dbmd.supportsGroupByBeyondSelect()); // needs checking
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// The metadata should echo back the URL and user used to connect.
public void testDBParams()
{
try
{
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(dbmd.getURL().equals(JDBC2Tests.getURL()));
assertTrue(dbmd.getUserName().equals(JDBC2Tests.getUser()));
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// Product name/version strings as reported by the driver.
public void testDbProductDetails()
{
try
{
assertTrue(con instanceof org.postgresql.Connection);
org.postgresql.Connection pc = (org.postgresql.Connection) con;
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(dbmd.getDatabaseProductName().equals("PostgreSQL"));
assertTrue(dbmd.getDatabaseProductVersion().startsWith(Integer.toString(pc.this_driver.getMajorVersion()) + "." + Integer.toString(pc.this_driver.getMinorVersion())));
assertTrue(dbmd.getDriverName().equals("PostgreSQL Native Driver"));
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
// Driver version numbers must agree with the Driver instance itself.
public void testDriverVersioning()
{
try
{
assertTrue(con instanceof org.postgresql.Connection);
org.postgresql.Connection pc = (org.postgresql.Connection) con;
DatabaseMetaData dbmd = con.getMetaData();
assertNotNull(dbmd);
assertTrue(dbmd.getDriverVersion().equals(pc.this_driver.getVersion()));
assertTrue(dbmd.getDriverMajorVersion() == pc.this_driver.getMajorVersion());
assertTrue(dbmd.getDriverMinorVersion() == pc.this_driver.getMinorVersion());
}
catch (SQLException ex)
{
fail(ex.getMessage());
}
}
}
|
package org.xins.common.manageable;
import org.xins.common.MandatoryArgumentChecker;
import org.xins.common.text.FastStringBuffer;
/**
* Exception thrown when the initialization of a <code>Manageable</code>
* object failed.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:ernst.dehaan@nl.wanadoo.com">ernst.dehaan@nl.wanadoo.com</a>)
*/
public final class InitializationException
extends Exception {

   // Class fields

   // Class functions

   /**
    * Creates a message based on the specified constructor arguments.
    *
    * @param message
    *    the message passed to the constructor, or <code>null</code>.
    *
    * @param cause
    *    the cause exception, or <code>null</code>.
    *
    * @return
    *    the message, never <code>null</code>.
    */
   private static final String createMessage(String message, Throwable cause) {

      // an explicit message always wins
      if (message != null) {
         return message;
      }

      // FIX: the original dereferenced cause unconditionally below, so
      // new InitializationException((String) null) threw a
      // NullPointerException; honor the "never null" return contract instead
      if (cause == null) {
         return "Initialization failed.";
      }

      // describe the cause exception, quoting its message when it has one
      // (plain StringBuilder replaces the project-local FastStringBuffer)
      String exceptionMessage = cause.getMessage();
      StringBuilder buffer = new StringBuilder(150);
      buffer.append("Caught ");
      buffer.append(cause.getClass().getName());
      if (exceptionMessage != null && exceptionMessage.length() > 0) {
         buffer.append(". Message: \"");
         buffer.append(exceptionMessage);
         buffer.append("\".");
      } else {
         buffer.append('.');
      }
      return buffer.toString();
   }

   // Constructors

   /**
    * Constructs a new <code>InitializationException</code> with the specified
    * message.
    *
    * @param message
    *    the detail message, or <code>null</code>.
    */
   public InitializationException(String message) {
      this(message, null);
   }

   /**
    * Constructs a new <code>InitializationException</code> with the specified
    * cause exception.
    *
    * @param cause
    *    the cause exception, or <code>null</code>.
    */
   public InitializationException(Throwable cause) {
      this(null, cause);
   }

   /**
    * Constructs a new <code>InitializationException</code> with the specified
    * detail message and cause exception.
    *
    * @param message
    *    the detail message, or <code>null</code>.
    *
    * @param cause
    *    the cause exception, or <code>null</code>.
    */
   public InitializationException(String message, Throwable cause) {
      super(createMessage(message, cause), cause);
   }

   // Fields

   // Methods
}
|
package nl.b3p.viewer.admin.stripes;
import java.text.SimpleDateFormat;
import java.util.*;
import javax.annotation.security.RolesAllowed;
import javax.persistence.NoResultException;
import net.sourceforge.stripes.action.*;
import net.sourceforge.stripes.validation.*;
import nl.b3p.viewer.config.app.*;
import nl.b3p.viewer.config.security.Group;
import nl.b3p.viewer.config.security.User;
import nl.b3p.viewer.config.services.BoundingBox;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Jytte Schaeffer
*/
@UrlBinding("/action/applicationsettings/")
@StrictBinding
@RolesAllowed({Group.ADMIN,Group.APPLICATION_ADMIN})
public class ApplicationSettingsActionBean extends ApplicationActionBean {
private static final Log log = LogFactory.getLog(ApplicationSettingsActionBean.class);
// view rendered by all handlers
private static final String JSP = "/WEB-INF/jsp/application/applicationSettings.jsp";
// fallback icon sprite applied when the application defines none
private static final String DEFAULT_SPRITE = "/resources/images/default_sprite.png";
// --- request-bound form fields (Stripes @Validate) ---
@Validate
private String name;
@Validate
private String version;
// username of the owning User; resolved to an entity in bindAppProperties()
@Validate
private String owner;
@Validate
private boolean authenticatedRequired;
// suffix appended to the application name when creating a mashup
@Validate
private String mashupName;
@Validate
private Map<String,ClobElement> details = new HashMap<String,ClobElement>();
@ValidateNestedProperties({
@Validate(field="minx", maxlength=255),
@Validate(field="miny", maxlength=255),
@Validate(field="maxx", maxlength=255),
@Validate(field="maxy", maxlength=255)
})
private BoundingBox startExtent;
@ValidateNestedProperties({
@Validate(field="minx", maxlength=255),
@Validate(field="miny", maxlength=255),
@Validate(field="maxx", maxlength=255),
@Validate(field="maxy", maxlength=255)
})
private BoundingBox maxExtent;
//<editor-fold defaultstate="collapsed" desc="getters & setters">
// Plain accessors for the Stripes-bound form fields above.
public Map<String,ClobElement> getDetails() {
return details;
}
public void setDetails(Map<String, ClobElement> details) {
this.details = details;
}
public boolean getAuthenticatedRequired() {
return authenticatedRequired;
}
public void setAuthenticatedRequired(boolean authenticatedRequired) {
this.authenticatedRequired = authenticatedRequired;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getOwner() {
return owner;
}
public void setOwner(String owner) {
this.owner = owner;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public BoundingBox getStartExtent() {
return startExtent;
}
public void setStartExtent(BoundingBox startExtent) {
this.startExtent = startExtent;
}
public BoundingBox getMaxExtent() {
return maxExtent;
}
public void setMaxExtent(BoundingBox maxExtent) {
this.maxExtent = maxExtent;
}
public String getMashupName() {
return mashupName;
}
public void setMashupName(String mashupName) {
this.mashupName = mashupName;
}
//</editor-fold>
/**
 * Default handler: copies the current application's settings into the form
 * fields (when an application is selected) and fills in default detail
 * entries, then renders the settings page.
 */
@DefaultHandler
@DontValidate
public Resolution view(){
if(application != null){
details = application.getDetails();
if(application.getOwner() != null){
owner = application.getOwner().getUsername();
}
startExtent = application.getStartExtent();
maxExtent = application.getMaxExtent();
name = application.getName();
version = application.getVersion();
authenticatedRequired = application.isAuthenticatedRequired();
}
// DEFAULT VALUES
if(!details.containsKey("iconSprite")) {
details.put("iconSprite", new ClobElement(DEFAULT_SPRITE));
}
if(!details.containsKey("stylesheetMetadata")) {
// TODO: Default value stylesheet metadata
details.put("stylesheetMetadata", new ClobElement(""));
}
if(!details.containsKey("stylesheetPrint")) {
// TODO: Default value stylesheet printen
details.put("stylesheetPrint", new ClobElement(""));
}
return new ForwardResolution(JSP);
}
/**
 * Clears the current application selection and seeds default details so
 * the form renders empty for a brand-new application.
 */
@DontValidate
public Resolution newApplication(){
application = null;
applicationId = -1L;
// DEFAULT VALUES
details.put("iconSprite", new ClobElement(DEFAULT_SPRITE));
// TODO: Default value stylesheet metadata
details.put("stylesheetMetadata", new ClobElement(""));
// TODO: Default value stylesheet printen
details.put("stylesheetPrint", new ClobElement(""));
return new ForwardResolution(JSP);
}
// Cancels editing; re-renders the page without binding any input.
@DontBind
public Resolution cancel() {
return new ForwardResolution(JSP);
}
/**
 * Persists the form values onto the current application, creating a fresh
 * application (with the standard root and background levels) when none is
 * selected yet.
 */
public Resolution save() {
if(application == null){
application = new Application();
/*
* A new application always has a root and a background level.
*/
Level root = new Level();
root.setName("Applicatie");
Level background = new Level();
background.setName("Achtergrond");
background.setBackground(true);
root.getChildren().add(background);
background.setParent(root);
Stripersist.getEntityManager().persist(background);
Stripersist.getEntityManager().persist(root);
application.setRoot(root);
}
bindAppProperties();
Stripersist.getEntityManager().persist(application);
Stripersist.getEntityManager().getTransaction().commit();
getContext().getMessages().add(new SimpleMessage("Applicatie is opgeslagen"));
setApplication(application);
return new ForwardResolution(JSP);
}
/**
 * Copies the bound form fields onto the application entity. The owner
 * username is resolved to a User entity; a lookup miss silently sets the
 * owner to null (validate() reports that case as an error beforehand).
 */
private void bindAppProperties() {
application.setName(name);
application.setVersion(version);
if(owner != null){
User appOwner = Stripersist.getEntityManager().find(User.class, owner);
application.setOwner(appOwner);
}
application.setStartExtent(startExtent);
application.setMaxExtent(maxExtent);
application.setAuthenticatedRequired(authenticatedRequired);
application.getDetails().putAll(details);
}
/**
 * save()-time validation: name is required, the name+version pair must be
 * unique (unless it identifies this very application), the owner must be
 * an existing user, and extents must be fully specified when present.
 */
@ValidationMethod(on="save")
public void validate(ValidationErrors errors) throws Exception {
if(name == null) {
errors.add("name", new SimpleError("Naam is verplicht"));
return;
}
try {
// look for another application with the same name/version; a
// NoResultException below means the combination is free
Long foundId = null;
if(version == null){
foundId = (Long)Stripersist.getEntityManager().createQuery("select id from Application where name = :name and version is null")
.setMaxResults(1)
.setParameter("name", name)
.getSingleResult();
}else{
foundId = (Long)Stripersist.getEntityManager().createQuery("select id from Application where name = :name and version = :version")
.setMaxResults(1)
.setParameter("name", name)
.setParameter("version", version)
.getSingleResult();
}
if(application != null && application.getId() != null){
// the match is only a conflict when it is a different application
if( !foundId.equals(application.getId()) ){
errors.add("name", new SimpleError("Naam en versie moeten een unieke combinatie vormen."));
}
}else{
// creating a new application with an already-taken combination
errors.add("name", new SimpleError("Naam en versie moeten een unieke combinatie vormen."));
}
} catch(NoResultException nre) {
// name version combination is unique
}
/*
* Check if owner is an existing user
*/
if(owner != null){
try {
User appOwner = Stripersist.getEntityManager().find(User.class, owner);
if(appOwner == null){
errors.add("owner", new SimpleError("Gebruiker met deze naam bestaat niet."));
}
} catch(NoResultException nre) {
errors.add("owner", new SimpleError("Gebruiker met deze naam bestaat niet."));
}
}
// extents are optional, but when given must have all four coordinates
if(startExtent != null){
if(startExtent.getMinx() == null || startExtent.getMiny() == null || startExtent.getMaxx() == null || startExtent.getMaxy() == null ){
errors.add("startExtent", new SimpleError("Alle velden van de start extentie moeten ingevult worden."));
}
}
if(maxExtent != null){
if(maxExtent.getMinx() == null || maxExtent.getMiny() == null || maxExtent.getMaxx() == null || maxExtent.getMaxy() == null ){
errors.add("maxExtent", new SimpleError("Alle velden van de max extentie moeten ingevult worden."));
}
}
}
    /**
     * Copies the current application under the name in {@code name}. Aborts
     * with a user message when an application with that name already exists.
     * @return a redirect back to this action on success or name clash, or a
     *         forward to the JSP with a global error when copying fails
     * @throws Exception declared for Stripes; errors are handled internally
     */
    public Resolution copy() throws Exception {
        try {
            // Probe for an existing application with the target name;
            // getSingleResult() throws NoResultException when the name is free.
            Object o = Stripersist.getEntityManager().createQuery("select 1 from Application where name = :name")
                    .setMaxResults(1)
                    .setParameter("name", name)
                    .getSingleResult();
            getContext().getMessages().add(new SimpleMessage("Kan niet kopieren; applicatie met naam \"{0}\" bestaat al", name));
            return new RedirectResolution(this.getClass());
        } catch(NoResultException nre) {
            // name is unique
        }
        try {
            bindAppProperties();
            Application copy = application.deepCopy();
            // don't save changes to original app
            Stripersist.getEntityManager().detach(application);
            Stripersist.getEntityManager().persist(copy);
            Stripersist.getEntityManager().getTransaction().commit();
            getContext().getMessages().add(new SimpleMessage("Applicatie is gekopieerd"));
            setApplication(copy);
            return new RedirectResolution(this.getClass());
        } catch(Exception e) {
            log.error(String.format("Error copying application #%d named %s %swith new name %s",
                    application.getId(),
                    application.getName(),
                    application.getVersion() == null ? "" : "v" + application.getVersion() + " ",
                    name), e);
            // Build a readable chain of causes for the user-visible error.
            String ex = e.toString();
            Throwable cause = e.getCause();
            while(cause != null) {
                ex += ";\n<br>" + cause.toString();
                cause = cause.getCause();
            }
            getContext().getValidationErrors().addGlobalError(new SimpleError("Fout bij kopieren applicatie: " + ex));
            return new ForwardResolution(JSP);
        }
    }
    /**
     * Creates a mashup of the current application: a copy that shares the
     * original's level/layer tree (the root is detached before the deep copy
     * and reattached to the mashup afterwards, so it is not duplicated).
     * The mashup is marked with the detail "isMashup" and named
     * "&lt;original name&gt;_&lt;mashupName&gt;".
     */
    public Resolution mashup(){
        ValidationErrors errors = context.getValidationErrors();
        try {
            Level root = application.getRoot();
            // Prevent copy-ing levels/layers
            application.setRoot(null);
            Application mashup = application.deepCopy();
            Stripersist.getEntityManager().detach(application);
            mashup.setRoot(root);
            mashup.getDetails().put("isMashup", new ClobElement(Boolean.TRUE + ""));
            mashup.setName(mashup.getName() + "_" + mashupName);
            Stripersist.getEntityManager().persist(mashup);
            Stripersist.getEntityManager().getTransaction().commit();
            setApplication(mashup);
        } catch (Exception ex) {
            // NOTE(review): the exception is swallowed without logging; consider
            // log.error(...) here so failures can be diagnosed.
            errors.add("Fout", new SimpleError("De mashup kan niet worden gemaakt."));
        }
        return new RedirectResolution(ApplicationSettingsActionBean.class);
    }
    /**
     * Publishes the current application. Any previously published application
     * with the same name (version == null) is first backed up by giving it a
     * unique "B_&lt;timestamp&gt;" version; then this application's version is
     * cleared so it becomes the published one.
     */
    public Resolution publish (){
        // Find current published application and make backup
        try {
            Application oldPublished = (Application)Stripersist.getEntityManager().createQuery("from Application where name = :name AND version IS null")
                    .setMaxResults(1)
                    .setParameter("name", name)
                    .getSingleResult();
            Date nowDate = new Date(System.currentTimeMillis());
            // Timestamp like "14-05_31-12-2012" used in the backup version label.
            SimpleDateFormat sdf = (SimpleDateFormat) SimpleDateFormat.getDateInstance();
            sdf.applyPattern("HH-mm_dd-MM-yyyy");
            String now = sdf.format(nowDate);
            String uniqueVersion = findUniqueVersion(name, "B_"+now );
            oldPublished.setVersion(uniqueVersion);
            Stripersist.getEntityManager().persist(oldPublished);
            Stripersist.getEntityManager().getTransaction().commit();
        } catch(NoResultException nre) {
            // No application published under this name yet; nothing to back up.
        }
        application.setVersion(null);
        Stripersist.getEntityManager().persist(application);
        Stripersist.getEntityManager().getTransaction().commit();
        setApplication(null);
        return new RedirectResolution(ChooseApplicationActionBean.class);
    }
    /**
     * Checks whether an Application with the given name and version already
     * exists and, if so, appends a sequence number in brackets to the version
     * until the name/version combination is unique.
     * @param name Application name the version must be unique for
     * @param version Candidate version string
     * @return A version that is unique for the given application name
     */
    public static String findUniqueVersion(String name, String version) {
        int uniqueCounter = 0;
        while(true) {
            String testVersion;
            if(uniqueCounter == 0) {
                testVersion = version;
            } else {
                testVersion = version + " (" + uniqueCounter + ")";
            }
            try {
                // getSingleResult() throws NoResultException when no application
                // uses this name/version pair, i.e. the candidate is unique.
                Stripersist.getEntityManager().createQuery("select 1 from Application where name = :name AND version = :version")
                        .setParameter("name", name)
                        .setParameter("version", testVersion)
                        .setMaxResults(1)
                        .getSingleResult();
                uniqueCounter++;
            } catch(NoResultException nre) {
                version = testVersion;
                break;
            }
        }
        return version;
    }
}
|
package nl.b3p.viewer.admin.stripes;
import java.io.StringReader;
import java.util.*;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletResponse;
import net.sourceforge.stripes.action.*;
import net.sourceforge.stripes.validation.*;
import nl.b3p.viewer.config.app.*;
import nl.b3p.web.stripes.ErrorMessageResolution;
import org.json.*;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Jytte Schaeffer
*/
@UrlBinding("/action/applicationstartmap/{$event}")
@StrictBinding
public class ApplicationStartMapActionBean extends ApplicationActionBean {
    private static final String JSP = "/WEB-INF/jsp/application/applicationStartMap.jsp";
    // Posted JSON array describing the currently selected levels/layers.
    @Validate
    private String selectedContent;
    private JSONArray jsonContent;
    // Posted JSON ({"id","type"}) of the item the client wants to select.
    @Validate
    private String contentToBeSelected;
    // Posted JSON array of application-layer ids that are checked.
    @Validate
    private String checkedLayersString;
    private JSONArray jsonCheckedLayers;
    // Aggregated ids of all checked layers in the application tree.
    private JSONArray allCheckedLayers = new JSONArray();
    @Validate
    private String nodeId;
    // Node id of the level whose children are requested ("n<id>" / "s<id>").
    @Validate
    private String levelId;
    private Level rootlevel;
@DefaultHandler
@HandlesEvent("default")
@DontValidate
public Resolution view() throws JSONException {
if (application == null) {
getContext().getMessages().add(new SimpleError("Er moet eerst een bestaande applicatie geactiveerd of een nieuwe applicatie gemaakt worden."));
return new ForwardResolution("/WEB-INF/jsp/application/chooseApplication.jsp");
} else {
rootlevel = application.getRoot();
getCheckedLayerList(allCheckedLayers, rootlevel);
}
return new ForwardResolution(JSP);
}
    /**
     * Persists the start map selection: parses the posted JSON, writes the
     * selected index and checked flag onto every level and layer in the
     * application tree, and commits everything in one transaction.
     */
    public Resolution save() throws JSONException {
        rootlevel = application.getRoot();
        jsonContent = new JSONArray(selectedContent);
        jsonCheckedLayers = new JSONArray(checkedLayersString);
        walkAppTreeForSave(rootlevel);
        Stripersist.getEntityManager().getTransaction().commit();
        getContext().getMessages().add(new SimpleMessage("Het startkaartbeeld is opgeslagen"));
        // Refresh the checked-layer list shown in the JSP.
        getCheckedLayerList(allCheckedLayers, rootlevel);
        return new ForwardResolution(JSP);
    }
public Resolution canContentBeSelected() {
try {
jsonContent = new JSONArray(selectedContent);
if(jsonContent.length() == 0) {
JSONObject obj = new JSONObject();
obj.put("result", true);
return new StreamingResolution("application/json", new StringReader(obj.toString()));
}
JSONObject o = new JSONObject(contentToBeSelected);
Boolean result = true;
String message = null;
String id = o.getString("id");
if(o.get("type").equals("layer")) {
ApplicationLayer appLayer = Stripersist.getEntityManager().find(ApplicationLayer.class, new Long(id));
if(appLayer == null) {
message = "Kaartlaag met id " + id + " is onbekend!";
result = false;
} else {
/* An appLayer can not be selected if:
* - selectedContent contains the appLayer
* - the appLayer is a layer of any level or its children in selectedContent
*/
for(int i = 0; i < jsonContent.length(); i++) {
JSONObject content = jsonContent.getJSONObject(i);
if(content.getString("type").equals("layer")) {
if(id.equals(content.getString("id"))) {
result = false;
message = "Kaartlaag is al geselecteerd";
break;
}
} else {
Level l = Stripersist.getEntityManager().find(Level.class, new Long(content.getString("id")));
if(l != null) {
if(l.containsLayerInSubtree(appLayer)) {
result = false;
message = "Kaartlaag is al geselecteerd als onderdeel van een niveau";
break;
}
}
}
}
}
} else {
Level level = Stripersist.getEntityManager().find(Level.class, new Long(id));
if(level == null) {
result = false;
message = "Niveau met id " + id + " is onbekend!";
} else {
/* A level can not be selected if:
* any level in selectedContent is the level is a sublevel of the level
* any level in selectedContent is a parent (recursive) of the level
*/
for(int i = 0; i < jsonContent.length(); i++) {
JSONObject content = jsonContent.getJSONObject(i);
if(content.getString("type").equals("level")) {
if(id.equals(content.getString("id"))) {
result = false;
message = "Niveau is al geselecteerd";
break;
}
Level l = Stripersist.getEntityManager().find(Level.class, new Long(content.getString("id")));
if(l != null) {
if(l.containsLevelInSubtree(level)) {
result = false;
message = "Niveau kan niet worden geselecteerd omdat een bovenliggend niveau al geselecteerd is";
break;
}
if(l.isInSubtreeOf(level)) {
result = false;
message = "Niveau kan niet worden geselecteerd omdat een subniveau al geselecteerd is";
break;
}
}
} else {
ApplicationLayer appLayer = Stripersist.getEntityManager().find(ApplicationLayer.class, new Long(content.getString("id")));
if(level.containsLayerInSubtree(appLayer)) {
result = false;
message = "Niveau kan niet worden geselecteerd omdat een kaartlaag uit dit (of onderliggend) niveau al is geselecteerd";
break;
}
}
}
}
}
JSONObject obj = new JSONObject();
obj.put("result", result);
obj.put("message", message);
return new StreamingResolution("application/json", new StringReader(obj.toString()));
} catch(Exception e) {
return new ErrorMessageResolution("Exception " + e.getClass() + ": " + e.getMessage());
}
}
private void walkAppTreeForSave(Level l) throws JSONException{
l.setSelectedIndex(getSelectedContentIndex(l));
for(ApplicationLayer al: l.getLayers()) {
al.setSelectedIndex(getSelectedContentIndex(al));
al.setChecked(getCheckedForLayerId(al.getId()));
}
for(Level child: l.getChildren()) {
walkAppTreeForSave(child);
}
}
private boolean getCheckedForLayerId(Long levelid) throws JSONException {
for(int i = 0; i < jsonCheckedLayers.length(); i++){
if(levelid.equals(Long.parseLong(jsonCheckedLayers.getString(i)))) {
return true;
}
}
return false;
}
private Integer getSelectedContentIndex(Level l) throws JSONException{
Integer index = null;
for(int i = 0; i < jsonContent.length(); i++){
JSONObject js = jsonContent.getJSONObject(i);
String id = js.get("id").toString();
String type = js.get("type").toString();
if(id.equals(l.getId().toString()) && type.equals("level")){
index = i;
}
}
return index;
}
private Integer getSelectedContentIndex(ApplicationLayer al) throws JSONException{
Integer index = null;
for(int i = 0; i < jsonContent.length(); i++){
JSONObject js = jsonContent.getJSONObject(i);
String id = js.get("id").toString();
String type = js.get("type").toString();
if(id.equals(al.getId().toString()) && type.equals("layer")){
index = i;
}
}
return index;
}
    /**
     * AJAX endpoint that streams (as JSON) the direct children — sublevels and
     * layers — of the tree node identified by {@code nodeId}. Node ids are
     * prefixed "n" for levels and "s" for application layers; the bare root
     * request "n" yields an empty list.
     */
    public Resolution loadApplicationTree() throws JSONException {
        EntityManager em = Stripersist.getEntityManager();
        final JSONArray children = new JSONArray();
        if (!nodeId.equals("n")) {
            // First character is the node type, the rest is the numeric id.
            String type = nodeId.substring(0, 1);
            int id = Integer.parseInt(nodeId.substring(1));
            if (type.equals("n")) {
                Level l = em.find(Level.class, new Long(id));
                for (Level sub : l.getChildren()) {
                    JSONObject j = new JSONObject();
                    j.put("id", "n" + sub.getId());
                    j.put("name", sub.getName());
                    j.put("type", "level");
                    j.put("isLeaf", sub.getChildren().isEmpty() && sub.getLayers().isEmpty());
                    if (sub.getParent() != null) {
                        j.put("parentid", sub.getParent().getId());
                    }
                    children.put(j);
                }
                for (ApplicationLayer layer : l.getLayers()) {
                    JSONObject j = new JSONObject();
                    j.put("id", "s" + layer.getId());
                    j.put("name", layer.getLayerName());
                    j.put("type", "layer");
                    j.put("isLeaf", true);
                    j.put("parentid", nodeId);
                    children.put(j);
                }
            }
        }
        return new StreamingResolution("application/json") {
            @Override
            public void stream(HttpServletResponse response) throws Exception {
                response.getWriter().print(children.toString());
            }
        };
    }
public Resolution loadSelectedLayers() throws JSONException {
EntityManager em = Stripersist.getEntityManager();
final JSONArray children = new JSONArray();
rootlevel = application.getRoot();
if(levelId != null && levelId.substring(1).equals(rootlevel.getId().toString())){
List selectedObjects = new ArrayList();
walkAppTreeForStartMap(selectedObjects, rootlevel);
Collections.sort(selectedObjects, new Comparator() {
@Override
public int compare(Object lhs, Object rhs) {
Integer lhsIndex, rhsIndex;
if(lhs instanceof Level) {
lhsIndex = ((Level)lhs).getSelectedIndex();
} else {
lhsIndex = ((ApplicationLayer)lhs).getSelectedIndex();
}
if(rhs instanceof Level) {
rhsIndex = ((Level)rhs).getSelectedIndex();
} else {
rhsIndex = ((ApplicationLayer)rhs).getSelectedIndex();
}
return lhsIndex.compareTo(rhsIndex);
}
});
if(selectedObjects != null){
for (Iterator it = selectedObjects.iterator(); it.hasNext();) {
Object map = it.next();
if(map instanceof ApplicationLayer){
ApplicationLayer layer = (ApplicationLayer) map;
JSONObject j = new JSONObject();
j.put("id", "s" + layer.getId());
j.put("name", layer.getLayerName());
j.put("type", "layer");
j.put("isLeaf", true);
j.put("parentid", "");
j.put("checked", layer.isChecked());
children.put(j);
}else if(map instanceof Level){
Level level = (Level) map;
JSONArray checked = new JSONArray();
getCheckedLayerList(checked, level);
JSONObject j = new JSONObject();
j.put("id", "n" + level.getId());
j.put("name", level.getName());
j.put("type", "level");
j.put("isLeaf", level.getChildren().isEmpty() && level.getLayers().isEmpty());
j.put("parentid", "");
j.put("checkedlayers", checked);
// j.put("checked", false);
children.put(j);
}
}
}
}else{
String type = levelId.substring(0, 1);
int id = Integer.parseInt(levelId.substring(1));
if (type.equals("n")) {
Level l = em.find(Level.class, new Long(id));
for (Level sub : l.getChildren()) {
JSONObject j = new JSONObject();
j.put("id", "n" + sub.getId());
j.put("name", sub.getName());
j.put("type", "level");
j.put("isLeaf", sub.getChildren().isEmpty() && sub.getLayers().isEmpty());
if (sub.getParent() != null) {
j.put("parentid", sub.getParent().getId());
}
// j.put("checked", false);
children.put(j);
}
for (ApplicationLayer layer : l.getLayers()) {
JSONObject j = new JSONObject();
j.put("id", "s" + layer.getId());
j.put("name", layer.getLayerName());
j.put("type", "layer");
j.put("isLeaf", true);
j.put("parentid", levelId);
j.put("checked", layer.isChecked());
children.put(j);
}
}
}
return new StreamingResolution("application/json") {
@Override
public void stream(HttpServletResponse response) throws Exception {
response.getWriter().print(children.toString());
}
};
}
private static void walkAppTreeForStartMap(List selectedContent, Level l){
if(l.getSelectedIndex() != null) {
selectedContent.add(l);
}
for(ApplicationLayer al: l.getLayers()) {
if(al.getSelectedIndex() != null) {
selectedContent.add(al);
}
}
for(Level child: l.getChildren()) {
walkAppTreeForStartMap(selectedContent, child);
}
}
private static void getCheckedLayerList(JSONArray layers, Level l) throws JSONException{
for(ApplicationLayer al: l.getLayers()) {
if(al.isChecked()) {
layers.put(al.getId());
}
}
for(Level child: l.getChildren()) {
getCheckedLayerList(layers, child);
}
}
    //<editor-fold defaultstate="collapsed" desc="getters & setters">
    // Plain accessors for the Stripes-bound request properties above.
    public String getCheckedLayersString() {
        return checkedLayersString;
    }
    public void setCheckedLayersString(String checkedLayersString) {
        this.checkedLayersString = checkedLayersString;
    }
    public String getSelectedContent() {
        return selectedContent;
    }
    public void setSelectedContent(String selectedContent) {
        this.selectedContent = selectedContent;
    }
    public Level getRootlevel() {
        return rootlevel;
    }
    public void setRootlevel(Level rootlevel) {
        this.rootlevel = rootlevel;
    }
    public String getLevelId() {
        return levelId;
    }
    public void setLevelId(String levelId) {
        this.levelId = levelId;
    }
    public JSONArray getAllCheckedLayers() {
        return allCheckedLayers;
    }
    public void setAllCheckedLayers(JSONArray allCheckedLayers) {
        this.allCheckedLayers = allCheckedLayers;
    }
    public String getNodeId() {
        return nodeId;
    }
    public void setNodeId(String nodeId) {
        this.nodeId = nodeId;
    }
    public String getContentToBeSelected() {
        return contentToBeSelected;
    }
    public void setContentToBeSelected(String contentToBeSelected) {
        this.contentToBeSelected = contentToBeSelected;
    }
    //</editor-fold>
}
|
package org.chromium.chrome.test;
import android.app.Activity;
import android.app.ActivityManager;
import android.app.ActivityManager.AppTask;
import android.app.Instrumentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.PowerManager;
import android.provider.Browser;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.widget.ListView;
import com.google.android.apps.chrome.R;
import junit.framework.Assert;
import org.chromium.base.PerfTraceEvent;
import org.chromium.base.ThreadUtils;
import org.chromium.base.annotations.SuppressFBWarnings;
import org.chromium.base.test.BaseActivityInstrumentationTestCase;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.PerfTest;
import org.chromium.chrome.browser.ChromeActivity;
import org.chromium.chrome.browser.ChromeMobileApplication;
import org.chromium.chrome.browser.ChromeSwitches;
import org.chromium.chrome.browser.ChromeTabbedActivity;
import org.chromium.chrome.browser.DeferredStartupHandler;
import org.chromium.chrome.browser.Tab;
import org.chromium.chrome.browser.document.ChromeLauncherActivity;
import org.chromium.chrome.browser.document.DocumentActivity;
import org.chromium.chrome.browser.document.DocumentMetricIds;
import org.chromium.chrome.browser.document.DocumentUtils;
import org.chromium.chrome.browser.document.IncognitoDocumentActivity;
import org.chromium.chrome.browser.infobar.InfoBar;
import org.chromium.chrome.browser.ntp.NewTabPage;
import org.chromium.chrome.browser.omaha.OmahaClient;
import org.chromium.chrome.browser.omnibox.AutocompleteController;
import org.chromium.chrome.browser.omnibox.LocationBarLayout;
import org.chromium.chrome.browser.omnibox.OmniboxResultsAdapter.OmniboxResultItem;
import org.chromium.chrome.browser.omnibox.OmniboxSuggestion;
import org.chromium.chrome.browser.omnibox.UrlBar;
import org.chromium.chrome.browser.preferences.NetworkPredictionOptions;
import org.chromium.chrome.browser.preferences.PrefServiceBridge;
import org.chromium.chrome.browser.preferences.Preferences;
import org.chromium.chrome.browser.preferences.PreferencesLauncher;
import org.chromium.chrome.browser.tabmodel.EmptyTabModelObserver;
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModel.TabLaunchType;
import org.chromium.chrome.browser.tabmodel.TabModel.TabSelectionType;
import org.chromium.chrome.browser.tabmodel.TabModelObserver;
import org.chromium.chrome.browser.tabmodel.TabModelSelector;
import org.chromium.chrome.browser.util.FeatureUtilities;
import org.chromium.chrome.test.util.ActivityUtils;
import org.chromium.chrome.test.util.ApplicationData;
import org.chromium.chrome.test.util.ChromeTabUtils;
import org.chromium.chrome.test.util.MenuUtils;
import org.chromium.chrome.test.util.NewTabPageTestUtils;
import org.chromium.chrome.test.util.TestHttpServerClient;
import org.chromium.content.browser.test.util.CallbackHelper;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.content.browser.test.util.JavaScriptUtils;
import org.chromium.content.browser.test.util.RenderProcessLimit;
import org.chromium.content.browser.test.util.TestTouchUtils;
import org.chromium.content.browser.test.util.TouchCommon;
import org.chromium.content_public.browser.LoadUrlParams;
import org.chromium.ui.base.PageTransition;
import java.io.File;
import java.lang.reflect.Method;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Base class for all Chrome instrumentation tests.
* All tests must inherit from this class and define their own test methods
* See ChromeTabbedActivityTestBase.java for example.
* @param <T> A {@link ChromeActivity} class
*/
@CommandLineFlags.Add(ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE)
public abstract class ChromeActivityTestCaseBase<T extends ChromeActivity>
extends BaseActivityInstrumentationTestCase<T> {
private static final String TAG = "ChromeActivityTestCaseBase";
// The number of ms to wait for the rendering activity to be started.
protected static final int ACTIVITY_START_TIMEOUT_MS = 1000;
private static final String PERF_NORUN_TAG = "--NORUN
private static final String PERF_ANNOTATION_FORMAT = "**PERFANNOTATION(%s):";
private static final String MEMORY_TRACE_GRAPH_SUFFIX = " - browser PSS";
private static final String PERF_OUTPUT_FILE = "PerfTestData.txt";
private static final long OMNIBOX_FIND_SUGGESTION_TIMEOUT_MS = 10 * 1000;
private static final float FLOAT_EPSILON = 0.001f;
public ChromeActivityTestCaseBase(Class<T> activityClass) {
super(activityClass);
}
protected boolean mSkipClearAppData = false;
private PowerManager.WakeLock mWakeLock = null;
protected boolean mSkipCheckHttpServer = false;
    /**
     * Clears app data (unless {@link #mSkipClearAppData}), acquires a wake lock
     * so the screen stays on, disables Omaha update traffic, optionally checks
     * the test HTTP server, then starts the main activity via
     * {@link #startMainActivity()}.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        setActivityInitialTouchMode(false);
        if (!mSkipClearAppData) {
            // We shouldn't clear the data at the end of test, it is needed for debugging.
            assertTrue("Unable to clear the app data", clearAppData());
            if (FeatureUtilities.isDocumentMode(getInstrumentation().getTargetContext())) {
                closeAllChromeActivityAppTasks();
            }
        }
        // Make sure the screen is on during test runs.
        PowerManager pm = (PowerManager) getInstrumentation().getTargetContext()
                .getSystemService(Context.POWER_SERVICE);
        mWakeLock = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK
                | PowerManager.ACQUIRE_CAUSES_WAKEUP | PowerManager.ON_AFTER_RELEASE, TAG);
        mWakeLock.acquire();
        // Disable Omaha related activities.
        OmahaClient.setEnableCommunication(false);
        OmahaClient.setEnableUpdateDetection(false);
        if (!mSkipCheckHttpServer) {
            TestHttpServerClient.checkServerIsUp();
        }
        startMainActivity();
    }

    /** Releases the wake lock acquired in {@link #setUp()}. */
    @Override
    protected void tearDown() throws Exception {
        // The lock is always acquired in setUp(); null here means setUp() never ran.
        assertNotNull("Uninitialized wake lock", mWakeLock);
        mWakeLock.release();
        super.tearDown();
    }
    /**
     * Called to start the Main Activity; the subclass should implement it with
     * its desired start method.
     * TODO: Make startMainActivityFromLauncher the default.
     */
    public abstract void startMainActivity() throws InterruptedException;
/**
* Matches testString against baseString.
* Returns 0 if there is no match, 1 if an exact match and 2 if a fuzzy match.
*/
protected static int matchUrl(String baseString, String testString) {
if (baseString.equals(testString)) {
return 1;
}
if (baseString.contains(testString)) {
return 2;
}
return 0;
}
    /**
     * Invokes {@link Instrumentation#startActivitySync(Intent)} and sets the
     * test case's activity to the result. See the documentation for
     * {@link Instrumentation#startActivitySync(Intent)} on the timing of the
     * return, but generally speaking the activity's "onCreate" has completed
     * and the activity's main looper has become idle.
     */
    protected void startActivityCompletely(Intent intent) {
        // In document mode the launched activity is a DocumentActivity;
        // otherwise it is the classic ChromeTabbedActivity.
        final Class<?> activityClazz =
                FeatureUtilities.isDocumentMode(getInstrumentation().getTargetContext())
                ? DocumentActivity.class : ChromeTabbedActivity.class;
        Instrumentation.ActivityMonitor monitor = getInstrumentation().addMonitor(
                activityClazz.getName(), null, false);
        Activity activity = getInstrumentation().startActivitySync(intent);
        assertNotNull("Main activity did not start", activity);
        // Wait for the monitored Chrome activity (may differ from the activity
        // returned by startActivitySync, e.g. a launcher trampoline).
        ChromeActivity chromeActivity = (ChromeActivity)
                monitor.waitForActivityWithTimeout(ACTIVITY_START_TIMEOUT_MS);
        assertNotNull("ChromeActivity did not start", chromeActivity);
        setActivity(chromeActivity);
        Log.d(TAG, "startActivityCompletely <<");
    }

    /**
     * Clear all files and folders in the Chrome application directory except 'lib'.
     *
     * The 'cache' directory is recreated as an empty directory.
     *
     * @return Whether clearing the application data was successful.
     */
    protected boolean clearAppData() throws InterruptedException {
        return ApplicationData.clearAppData(getInstrumentation().getTargetContext());
    }
    /**
     * Closes all Chrome activity app tasks. This cleans up Chrome tasks in the
     * recents list; those are not necessarily associated with a live activity.
     */
    private void closeAllChromeActivityAppTasks() throws ClassNotFoundException {
        ActivityManager am = (ActivityManager) getInstrumentation().getTargetContext()
                .getSystemService(Context.ACTIVITY_SERVICE);
        PackageManager pm = getInstrumentation().getTargetContext().getPackageManager();
        List<AppTask> taskList = am.getAppTasks();
        for (AppTask task : taskList) {
            // Remove only tasks whose root activity is a ChromeActivity subclass.
            String className = DocumentUtils.getTaskClassName(task, pm);
            if (ChromeActivity.class.isAssignableFrom(Class.forName(className))) {
                task.finishAndRemoveTask();
            }
        }
    }

    /**
     * Lets tests specify whether they want prerendering turned on.
     * It is on by default. Since in some places different code paths are used for the same feature
     * depending of whether instant is on or off (ex: infobars), it is necessary for some tests to
     * test with and without instant.
     *
     * @param enabled whether prerender should be on.
     */
    protected void setAllowPrerender(final boolean enabled) {
        getInstrumentation().runOnMainSync(new Runnable() {
            @Override
            public void run() {
                PrefServiceBridge.getInstance().setNetworkPredictionOptions(enabled
                        ? NetworkPredictionOptions.NETWORK_PREDICTION_ALWAYS
                        : NetworkPredictionOptions.NETWORK_PREDICTION_NEVER);
            }
        });
    }
    /**
     * Starts (synchronously) a drag motion. Normally followed by dragTo() and dragEnd().
     *
     * @param x
     * @param y
     * @param downTime (in ms)
     * @see TestTouchUtils
     */
    protected void dragStart(float x, float y, long downTime) {
        TouchCommon.dragStart(getActivity(), x, y, downTime);
    }

    /**
     * Drags / moves (synchronously) to the specified coordinates. Normally preceded by
     * dragStart() and followed by dragEnd()
     *
     * @param fromX
     * @param toX
     * @param fromY
     * @param toY
     * @param stepCount
     * @param downTime (in ms)
     * @see TestTouchUtils
     */
    protected void dragTo(float fromX, float toX, float fromY,
            float toY, int stepCount, long downTime) {
        TouchCommon.dragTo(getActivity(), fromX, toX, fromY, toY, stepCount, downTime);
    }

    /**
     * Finishes (synchronously) a drag / move at the specified coordinate.
     * Normally preceded by dragStart() and dragTo().
     *
     * @param x
     * @param y
     * @param downTime (in ms)
     * @see TestTouchUtils
     */
    protected void dragEnd(float x, float y, long downTime) {
        TouchCommon.dragEnd(getActivity(), x, y, downTime);
    }

    /**
     * Sends (synchronously) a single click to an absolute screen coordinates.
     *
     * @param x screen absolute
     * @param y screen absolute
     * @see TestTouchUtils
     */
    public void singleClick(float x, float y) {
        TouchCommon.singleClick(getActivity(), x, y);
    }

    /**
     * Sends (synchronously) a single click to the View at the specified coordinates.
     *
     * <p>
     * Differs from
     * {@link TestTouchUtils#singleClickView(android.app.Instrumentation, View, int, int)}
     * as this does not rely on injecting events into the different activity. Injecting events has
     * been unreliable for us and simulating the touch events in this manner is just as effective.
     *
     * @param v The view to be clicked.
     * @param x Relative x location to v
     * @param y Relative y location to v
     */
    public void singleClickView(View v, int x, int y) {
        TouchCommon.singleClickView(v, x, y);
    }

    /**
     * Sends (synchronously) a single click to the center of the View.
     *
     * <p>
     * Differs from
     * {@link TestTouchUtils#singleClickView(android.app.Instrumentation, View)}
     * as this does not rely on injecting events into the different activity. Injecting events has
     * been unreliable for us and simulating the touch events in this manner is just as effective.
     *
     * @param v The view to be clicked.
     */
    public void singleClickView(View v) {
        TouchCommon.singleClickView(v);
    }
    /**
     * Waits for {@link AsyncTask}'s that have been queued to finish. Note, this
     * only waits for tasks that have been started using the default
     * {@link java.util.concurrent.Executor}, which executes tasks serially.
     *
     * @param timeout how long to wait for tasks to complete
     */
    public void waitForAsyncTasks(long timeout) throws InterruptedException {
        // The serial executor runs tasks in order, so once this task releases
        // the semaphore every previously queued task has completed.
        final Semaphore s = new Semaphore(0);
        new AsyncTask<Void, Void, Void>() {
            @Override
            protected Void doInBackground(Void... arg0) {
                s.release();
                return null;
            }
        }.execute();
        assertTrue(s.tryAcquire(timeout, TimeUnit.MILLISECONDS));
    }

    /**
     * Navigates to a URL directly without going through the UrlBar. This bypasses the page
     * preloading mechanism of the UrlBar.
     * @param url The url to load in the current tab.
     * @return FULL_PRERENDERED_PAGE_LOAD or PARTIAL_PRERENDERED_PAGE_LOAD if the page has been
     *         prerendered. DEFAULT_PAGE_LOAD if it had not.
     */
    public int loadUrl(final String url) throws IllegalArgumentException, InterruptedException {
        return loadUrlInTab(url, PageTransition.TYPED | PageTransition.FROM_ADDRESS_BAR,
                getActivity().getActivityTab());
    }

    /**
     * @param url The url of the page to load.
     * @param pageTransition The type of transition. see
     *            {@link org.chromium.content.browser.PageTransition}
     *            for valid values.
     * @param tab The tab to load the url into.
     * @return FULL_PRERENDERED_PAGE_LOAD or PARTIAL_PRERENDERED_PAGE_LOAD if the
     *         page has been prerendered. DEFAULT_PAGE_LOAD if it had not.
     */
    public int loadUrlInTab(final String url, final int pageTransition, final Tab tab)
            throws InterruptedException {
        assertNotNull("Cannot load the url in a null tab", tab);
        final AtomicInteger result = new AtomicInteger();
        // Trigger the load on the UI thread and block until the page finished.
        ChromeTabUtils.waitForTabPageLoaded(tab, new Runnable() {
            @Override
            public void run() {
                ThreadUtils.runOnUiThreadBlocking(new Runnable() {
                    @Override
                    public void run() {
                        result.set(tab.loadUrl(
                                new LoadUrlParams(url, pageTransition)));
                    }
                });
            }
        });
        getInstrumentation().waitForIdleSync();
        return result.get();
    }
    /**
     * Load a url in a new tab. The {@link Tab} will pretend to be created from a link.
     * @param url The url of the page to load.
     */
    public void loadUrlInNewTab(final String url) throws InterruptedException {
        // TODO(mariakhomenko): There is no current tab creator in document mode, will need
        // additional logic here for Document tests.
        if (FeatureUtilities.isDocumentMode(getInstrumentation().getContext())) {
            fail("Document mode not yet supported.");
        }
        try {
            Tab tab = ThreadUtils.runOnUiThreadBlocking(new Callable<Tab>() {
                @Override
                public Tab call() throws Exception {
                    return getActivity().getCurrentTabCreator()
                            .launchUrl(url, TabLaunchType.FROM_LINK);
                }
            });
            ChromeTabUtils.waitForTabPageLoaded(tab, url);
            getInstrumentation().waitForIdleSync();
        } catch (ExecutionException e) {
            fail("Failed to create new tab");
        }
    }

    /**
     * Load a url in a new tab. The {@link Tab} will pretend to be created from a link.
     * @param url The url of the page to load.
     * @param incognito Whether the new tab should be incognito.
     */
    public void loadUrlInNewTab(final String url, final boolean incognito)
            throws InterruptedException {
        Tab tab = null;
        if (FeatureUtilities.isDocumentMode(getInstrumentation().getContext())) {
            // Document mode: launch a (possibly incognito) DocumentActivity in
            // the foreground and wait for its tab to become available.
            Runnable activityTrigger = new Runnable() {
                @Override
                public void run() {
                    ThreadUtils.runOnUiThreadBlocking(new Runnable() {
                        @Override
                        public void run() {
                            ChromeLauncherActivity.launchDocumentInstance(getActivity(), incognito,
                                    ChromeLauncherActivity.LAUNCH_MODE_FOREGROUND, url,
                                    DocumentMetricIds.STARTED_BY_UNKNOWN,
                                    PageTransition.AUTO_TOPLEVEL,
                                    false, null);
                        }
                    });
                }
            };
            final DocumentActivity activity = ActivityUtils.waitForActivity(
                    getInstrumentation(),
                    incognito ? IncognitoDocumentActivity.class : DocumentActivity.class,
                    activityTrigger);
            CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
                @Override
                public boolean isSatisfied() {
                    return activity.getActivityTab() != null;
                }
            });
            tab = activity.getActivityTab();
        } else {
            // Tabbed mode: create the tab via the (possibly incognito) tab creator.
            try {
                tab = ThreadUtils.runOnUiThreadBlocking(new Callable<Tab>() {
                    @Override
                    public Tab call() throws Exception {
                        return getActivity().getTabCreator(incognito)
                                .launchUrl(url, TabLaunchType.FROM_LINK);
                    }
                });
            } catch (ExecutionException e) {
                fail("Failed to create new tab");
            }
        }
        ChromeTabUtils.waitForTabPageLoaded(tab, url);
        getInstrumentation().waitForIdleSync();
    }
    /**
     * Simulates starting Main Activity from launcher, blocks until it is started.
     *
     * <p>Passing null to {@link #startMainActivityWithURL} issues a plain ACTION_MAIN
     * launcher intent, so the default page (the NTP) is shown.
     */
    protected void startMainActivityFromLauncher() throws InterruptedException {
        startMainActivityWithURL(null);
    }
/**
* Starts the Main activity on the specified URL. Passing a null URL ensures the default page is
* loaded, which is the NTP with a new profile .
*/
protected void startMainActivityWithURL(String url) throws InterruptedException {
// Only launch Chrome.
Intent intent = new Intent(
TextUtils.isEmpty(url) ? Intent.ACTION_MAIN : Intent.ACTION_VIEW);
intent.addCategory(Intent.CATEGORY_LAUNCHER);
startMainActivityFromIntent(intent, url);
}
    /**
     * Starts the Main activity and opens a blank page.
     * This is faster and less flakiness-prone than starting on the NTP.
     */
    protected void startMainActivityOnBlankPage() throws InterruptedException {
        startMainActivityWithURL("about:blank");
    }
/**
* Starts the Main activity as if it was started from an external application, on the specified
* URL.
*/
protected void startMainActivityFromExternalApp(String url, String appId)
throws InterruptedException {
Intent intent = new Intent(Intent.ACTION_VIEW);
if (appId != null) {
intent.putExtra(Browser.EXTRA_APPLICATION_ID, appId);
}
startMainActivityFromIntent(intent, url);
}
    /**
     * Starts the Main activity using the passed intent, and using the specified URL.
     * This method waits for DEFERRED_STARTUP to fire as well as a subsequent
     * idle-sync of the main looper thread, and the initial tab must either
     * complete its load or it must crash before this method will return.
     */
    protected void startMainActivityFromIntent(Intent intent, String url)
            throws InterruptedException {
        prepareUrlIntent(intent, url);
        // Query document mode before launch; used below to skip the NTP wait in document mode.
        final boolean isDocumentMode =
                FeatureUtilities.isDocumentMode(getInstrumentation().getContext());
        startActivityCompletely(intent);
        assertTrue("Tab never selected/initialized.",
                CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
                    @Override
                    public boolean isSatisfied() {
                        return getActivity().getActivityTab() != null;
                    }
                }));
        Tab tab = getActivity().getActivityTab();
        // Null url: wait for whatever page the tab is loading rather than a specific one.
        ChromeTabUtils.waitForTabPageLoaded(tab, (String) null);
        if (!isDocumentMode && tab != null && NewTabPage.isNTPUrl(tab.getUrl())) {
            boolean ntpReady = NewTabPageTestUtils.waitForNtpLoaded(tab);
            if (!ntpReady && tab.isShowingSadTab()) {
                // Distinguish a renderer crash from a plain NTP-load timeout in the failure.
                fail("Renderer crashed before NTP finished loading. "
                        + "Look at logcat for renderer stack dump.");
            }
            assertTrue("Initial NTP never fully loaded.", ntpReady);
        }
        assertTrue("Deferred startup never completed",
                CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
                    @Override
                    public boolean isSatisfied() {
                        return DeferredStartupHandler.getInstance().isDeferredStartupComplete();
                    }
                }));
        assertNotNull(tab);
        assertNotNull(tab.getView());
        getInstrumentation().waitForIdleSync();
    }
    /**
     * Prepares a URL intent to start the activity.
     * @param intent the intent to be modified
     * @param url the URL to be used (may be null)
     * @return the same intent instance, now targeting ChromeLauncherActivity.
     */
    protected Intent prepareUrlIntent(Intent intent, String url) {
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.setComponent(new ComponentName(getInstrumentation().getTargetContext(),
                ChromeLauncherActivity.class));
        if (url != null) {
            intent.setData(Uri.parse(url));
        }
        try {
            // Reflectively look up the currently-running test method; if it carries a
            // @RenderProcessLimit annotation, forward the limit via an intent extra.
            Method method = getClass().getMethod(getName(), (Class[]) null);
            if (method.isAnnotationPresent(RenderProcessLimit.class)) {
                RenderProcessLimit limit = method.getAnnotation(RenderProcessLimit.class);
                intent.putExtra(ChromeTabbedActivity.INTENT_EXTRA_TEST_RENDER_PROCESS_LIMIT,
                        limit.value());
            }
        } catch (Exception ex) {
            // Ignore exception: the annotation lookup is best-effort and must never fail a test.
        }
        return intent;
    }
    /**
     * Open an incognito tab by invoking the 'new incognito' menu item.
     * Returns when receiving the 'PAGE_LOAD_FINISHED' notification.
     */
    protected void newIncognitoTabFromMenu() throws InterruptedException {
        Tab tab = null;
        if (FeatureUtilities.isDocumentMode(getInstrumentation().getContext())) {
            // Document mode: the menu action spawns a new IncognitoDocumentActivity. Wait for
            // the activity first, then poll until its tab has actually been attached.
            final IncognitoDocumentActivity activity = ActivityUtils.waitForActivity(
                    getInstrumentation(), IncognitoDocumentActivity.class,
                    new Runnable() {
                        @Override
                        public void run() {
                            MenuUtils.invokeCustomMenuActionSync(
                                    getInstrumentation(), getActivity(),
                                    R.id.new_incognito_tab_menu_id);
                        }
                    });
            CriteriaHelper.pollForUIThreadCriteria(new Criteria() {
                @Override
                public boolean isSatisfied() {
                    return activity.getActivityTab() != null;
                }
            });
            tab = activity.getActivityTab();
        } else {
            // Tabbed mode: register an observer on the incognito model BEFORE triggering the
            // menu action, so neither the creation nor the selection event can be missed.
            final CallbackHelper createdCallback = new CallbackHelper();
            final CallbackHelper selectedCallback = new CallbackHelper();
            TabModel incognitoTabModel = getActivity().getTabModelSelector().getModel(true);
            TabModelObserver observer = new EmptyTabModelObserver() {
                @Override
                public void didAddTab(Tab tab, TabLaunchType type) {
                    createdCallback.notifyCalled();
                }
                @Override
                public void didSelectTab(Tab tab, TabSelectionType type, int lastId) {
                    selectedCallback.notifyCalled();
                }
            };
            incognitoTabModel.addObserver(observer);
            MenuUtils.invokeCustomMenuActionSync(getInstrumentation(), getActivity(),
                    R.id.new_incognito_tab_menu_id);
            try {
                createdCallback.waitForCallback(0);
            } catch (TimeoutException ex) {
                fail("Never received tab created event");
            }
            try {
                selectedCallback.waitForCallback(0);
            } catch (TimeoutException ex) {
                fail("Never received tab selected event");
            }
            incognitoTabModel.removeObserver(observer);
            tab = getActivity().getActivityTab();
        }
        ChromeTabUtils.waitForTabPageLoaded(tab, (String) null);
        Assert.assertTrue("NTP never fully loaded.",
                NewTabPageTestUtils.waitForNtpLoaded(tab));
        getInstrumentation().waitForIdleSync();
        Log.d(TAG, "newIncognitoTabFromMenu <<");
    }
/**
* New multiple incognito tabs by invoking the 'new incognito' menu item n times.
* @param n The number of tabs you want to create.
*/
protected void newIncognitoTabsFromMenu(int n)
throws InterruptedException {
while (n > 0) {
newIncognitoTabFromMenu();
--n;
}
}
    /**
     * @return The number of incognito tabs currently open.
     */
    protected int incognitoTabsCount() {
        return ThreadUtils.runOnUiThreadBlockingNoException(new Callable<Integer>() {
            @Override
            public Integer call() {
                // Document mode keeps tabs in an application-wide selector rather than the
                // activity's own; pick the right one before counting the incognito model.
                TabModelSelector tabModelSelector;
                if (FeatureUtilities.isDocumentMode(getInstrumentation().getContext())) {
                    tabModelSelector = ChromeMobileApplication.getDocumentTabModelSelector();
                } else {
                    tabModelSelector = getActivity().getTabModelSelector();
                }
                return tabModelSelector.getModel(true).getCount();
            }
        });
    }
    /**
     * Looks up the Omnibox in the view hierarchy and types the specified
     * text into it, requesting focus first. When typing one character at a
     * time, an inter-character delay of 20ms is used (the previous javadoc
     * said 200ms, which did not match the code).
     *
     * @param text The text to type into the omnibox.
     * @param oneCharAtATime Whether to type text one character at a time or all at once.
     *
     * @throws InterruptedException
     */
    public void typeInOmnibox(final String text, final boolean oneCharAtATime)
            throws InterruptedException {
        final UrlBar urlBar = (UrlBar) getActivity().findViewById(R.id.url_bar);
        assertNotNull(urlBar);
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                urlBar.requestFocus();
                if (!oneCharAtATime) {
                    urlBar.setText(text);
                }
            }
        });
        if (oneCharAtATime) {
            final Instrumentation instrumentation = getInstrumentation();
            for (int i = 0; i < text.length(); ++i) {
                instrumentation.sendStringSync(text.substring(i, i + 1));
                // Let's put some delay between key strokes to simulate a user pressing the keys.
                Thread.sleep(20);
            }
        }
    }
    /**
     * Searches for a given suggestion after typing given text in the Omnibox.
     *
     * @param inputText Input text to type into the Omnibox.
     * @param displayText Suggestion text expected to be found. Passing in null ignores this field.
     * @param url URL expected to be found. Passing in null ignores this field.
     * @param type Type of suggestion expected to be found. Passing in null ignores this field.
     * @return The first suggestion matching all non-null filters, or null if none arrived
     *         before OMNIBOX_FIND_SUGGESTION_TIMEOUT_MS elapsed.
     *
     * @throws InterruptedException
     */
    protected OmniboxSuggestion findOmniboxSuggestion(String inputText, String displayText,
            String url, OmniboxSuggestion.Type type) throws InterruptedException {
        long endTime = System.currentTimeMillis() + OMNIBOX_FIND_SUGGESTION_TIMEOUT_MS;
        // Multiple suggestion events may occur before the one we're interested in is received.
        final CallbackHelper onSuggestionsReceivedHelper = new CallbackHelper();
        final LocationBarLayout locationBar =
                (LocationBarLayout) getActivity().findViewById(R.id.location_bar);
        // Swap in a controller that signals the helper on every suggestion batch.
        locationBar.setAutocompleteController(new AutocompleteController(locationBar) {
            @Override
            public void onSuggestionsReceived(
                    List<OmniboxSuggestion> suggestions,
                    String inlineAutocompleteText,
                    long currentNativeAutocompleteResult) {
                super.onSuggestionsReceived(
                        suggestions, inlineAutocompleteText, currentNativeAutocompleteResult);
                onSuggestionsReceivedHelper.notifyCalled();
            }
        });
        try {
            typeInOmnibox(inputText, false);
            // Keep consuming suggestion batches until a match is found or the deadline passes.
            while (true) {
                try {
                    int callbackCount = onSuggestionsReceivedHelper.getCallCount();
                    onSuggestionsReceivedHelper.waitForCallback(
                            callbackCount, 1,
                            endTime - System.currentTimeMillis(), TimeUnit.MILLISECONDS);
                } catch (TimeoutException exception) {
                    return null;
                }
                // Wait for suggestions to show up.
                assertTrue(CriteriaHelper.pollForCriteria(new Criteria() {
                    @Override
                    public boolean isSatisfied() {
                        return ((LocationBarLayout) getActivity().findViewById(
                                R.id.location_bar)).getSuggestionList() != null;
                    }
                }, 3000, 10));
                final ListView suggestionListView = locationBar.getSuggestionList();
                OmniboxResultItem popupItem = (OmniboxResultItem) suggestionListView
                        .getItemAtPosition(0);
                OmniboxSuggestion suggestion = popupItem.getSuggestion();
                if (suggestionListView.getCount() == 1
                        && suggestion.getDisplayText().equals(inputText)
                        && !suggestion.getDisplayText().equals(displayText)) {
                    // If there is only one suggestion and it's the same as inputText,
                    // wait for other suggestions before looking for the one we want.
                    CriteriaHelper.pollForCriteria(new Criteria() {
                        @Override
                        public boolean isSatisfied() {
                            return suggestionListView.getCount() > 1;
                        }
                    }, 3000, 10);
                }
                int count = suggestionListView.getCount();
                for (int i = 0; i < count; i++) {
                    popupItem = (OmniboxResultItem) suggestionListView.getItemAtPosition(i);
                    suggestion = popupItem.getSuggestion();
                    // Skip suggestions failing any of the requested (non-null) filters.
                    if (type != null && suggestion.getType() != type) {
                        continue;
                    }
                    if (displayText != null && !suggestion.getDisplayText().equals(displayText)) {
                        continue;
                    }
                    if (url != null && !suggestion.getUrl().equals(url)) {
                        continue;
                    }
                    return suggestion;
                }
            }
        } finally {
            // Restore a default controller so later interactions are unaffected.
            locationBar.setAutocompleteController(new AutocompleteController(locationBar));
        }
    }
/**
* Returns the infobars being displayed by the current tab, or null if they don't exist.
*/
protected List<InfoBar> getInfoBars() {
Tab currentTab = getActivity().getActivityTab();
if (currentTab == null) {
return null;
}
if (currentTab.getInfoBarContainer() != null) {
return currentTab.getInfoBarContainer().getInfoBars();
} else {
return null;
}
}
/**
* Launches the preferences menu and starts the preferences activity named fragmentName.
* Returns the activity that was started.
*/
protected Preferences startPreferences(String fragmentName) {
Context context = getInstrumentation().getTargetContext();
Intent intent = PreferencesLauncher.createIntentForSettingsPage(context, fragmentName);
Activity activity = getInstrumentation().startActivitySync(intent);
assertTrue(activity instanceof Preferences);
return (Preferences) activity;
}
    /**
     * Executes the given snippet of JavaScript code within the current tab. Returns the result of
     * its execution in JSON format.
     *
     * @param code JavaScript snippet to evaluate in the current tab's web contents.
     * @return The evaluation result, serialized as JSON.
     * @throws InterruptedException
     */
    protected String runJavaScriptCodeInCurrentTab(String code) throws InterruptedException,
            TimeoutException {
        return JavaScriptUtils.executeJavaScriptAndWaitForResult(
                getActivity().getCurrentContentViewCore().getWebContents(), code);
    }
    /**
     * Runs the test only when it passes the restriction check, wrapping it with the
     * per-test perf-tracking setup and teardown.
     */
    @Override
    protected void runTest() throws Throwable {
        boolean shouldRun = true;
        String perfTagAnalysisString = "";
        try {
            shouldRun = RestrictedInstrumentationTestCase.shouldRunTest(this);
            perfTagAnalysisString = setupPotentialPerfTest(shouldRun);
        } catch (Exception e) {
            // eat the exception here; super.runTest() will catch it again and handle it properly
        }
        if (shouldRun) super.runTest();
        // Always emit the perf tag so the harness knows whether to expect perf output.
        endPerfTest(perfTagAnalysisString);
    }
    /**
     * Waits till the ContentViewCore receives the expected page scale factor
     * from the compositor and asserts that this happens.
     *
     * Upstream {@code ContentShellTestBase} has the same copy. Also, this is a temporary solution
     * for waiting a page load. Please refer to the bug at the upstream function.
     */
    protected void assertWaitForPageScaleFactorMatch(final float expectedScale)
            throws InterruptedException {
        // Poll until the reported scale is within FLOAT_EPSILON of the expected value.
        boolean scaleFactorMatch = CriteriaHelper.pollForCriteria(new Criteria() {
            @Override
            public boolean isSatisfied() {
                return Math.abs(getActivity().getCurrentContentViewCore().getScale()
                        - expectedScale) < FLOAT_EPSILON;
            }
        });
        assertTrue("Expecting scale factor of: " + expectedScale + ", got: "
                + getActivity().getCurrentContentViewCore().getScale(), scaleFactorMatch);
    }
    /**
     * This method creates a special string that tells the python test harness what
     * trace calls to track for this particular test run. It can support multiple trace calls for
     * each test and will make a new graph entry for all of them. It should be noted that this
     * method eats all exceptions. This is so that it can never be the cause of a test failure.
     * We still need to call this method even if we know the test will not run (ie: willTestRun is
     * false). This is because this method lets the python test harness know not to expect any
     * perf output in this case. In the case that the autoTrace parameter is set for the current
     * test method, this will also start the PerfTrace facility automatically.
     *
     * @param willTestRun Whether or not this test will actually be run.
     * @return A specially formatted string that contains which JSON perf markers to look at. This
     *         will be analyzed by the perf test harness.
     */
    @SuppressFBWarnings({
        "REC_CATCH_EXCEPTION",
        "RV_RETURN_VALUE_IGNORED_BAD_PRACTICE",
    })
    private String setupPotentialPerfTest(boolean willTestRun) {
        // Start each run with a fresh perf output file.
        File perfFile = getInstrumentation().getTargetContext().getFileStreamPath(
                PERF_OUTPUT_FILE);
        perfFile.delete();
        PerfTraceEvent.setOutputFile(perfFile);
        String perfAnnotationString = "";
        try {
            Method method = getClass().getMethod(getName(), (Class[]) null);
            PerfTest annotation = method.getAnnotation(PerfTest.class);
            if (annotation != null) {
                StringBuilder annotationData = new StringBuilder();
                annotationData.append(String.format(PERF_ANNOTATION_FORMAT, method.getName()));
                if (!willTestRun) {
                    annotationData.append(PERF_NORUN_TAG);
                } else {
                    // Grab the minimum number of trace calls we will track (if traceNames(),
                    // graphNames(), and seriesNames() do not have the same number of elements,
                    // we will track as many as we can given the data available).
                    final int maxIndex = Math.min(annotation.traceNames().length, Math.min(
                            annotation.graphNames().length, annotation.seriesNames().length));
                    List<String> allNames = new LinkedList<String>();
                    for (int i = 0; i < maxIndex; ++i) {
                        // Prune out all of ',' and ';' from the strings. Replace them with '-'.
                        String name = annotation.traceNames()[i].replaceAll("[,;]", "-");
                        allNames.add(name);
                        String graphName = annotation.graphNames()[i].replaceAll("[,;]", "-");
                        String seriesName = annotation.seriesNames()[i].replaceAll("[,;]", "-");
                        if (annotation.traceTiming()) {
                            annotationData.append(name).append(",")
                                    .append(graphName).append(",")
                                    .append(seriesName).append(';');
                        }
                        // If memory tracing is enabled, add an additional graph for each one
                        // defined to track timing perf that will track the corresponding memory
                        // usage.
                        // Keep the series name the same, but just append a memory identifying
                        // prefix to the graph.
                        if (annotation.traceMemory()) {
                            String memName = PerfTraceEvent.makeMemoryTraceNameFromTimingName(name);
                            String memGraphName = PerfTraceEvent.makeSafeTraceName(
                                    graphName, MEMORY_TRACE_GRAPH_SUFFIX);
                            annotationData.append(memName).append(",")
                                    .append(memGraphName).append(",")
                                    .append(seriesName).append(';');
                            allNames.add(memName);
                        }
                    }
                    // We only record perf trace events for the names explicitly listed.
                    PerfTraceEvent.setFilter(allNames);
                    // Figure out if we should automatically start or stop the trace.
                    if (annotation.autoTrace()) {
                        PerfTraceEvent.setEnabled(true);
                    }
                    PerfTraceEvent.setTimingTrackingEnabled(annotation.traceTiming());
                    PerfTraceEvent.setMemoryTrackingEnabled(annotation.traceMemory());
                }
                perfAnnotationString = annotationData.toString();
            }
        } catch (Exception ex) {
            // Eat exception here: perf setup is best-effort and must never fail the test.
        }
        return perfAnnotationString;
    }
    /**
     * This handles cleaning up the performance component of this test if it was a UI Perf test.
     * This includes potentially shutting down PerfTraceEvent. This method eats all exceptions so
     * that it can never be the cause of a test failure. The test harness will wait for
     * {@code perfTagAnalysisString} to show up in the logcat before processing the JSON perf file,
     * giving this method the chance to flush and dump the performance data before the harness reads
     * it.
     *
     * @param perfTagAnalysisString A specially formatted string that tells the perf test harness
     *                              which perf tags to analyze.
     */
    private void endPerfTest(String perfTagAnalysisString) {
        try {
            Method method = getClass().getMethod(getName(), (Class[]) null);
            PerfTest annotation = method.getAnnotation(PerfTest.class);
            if (annotation != null) {
                // Stop tracing (flushing the JSON perf file) before signalling the harness.
                if (PerfTraceEvent.enabled()) {
                    PerfTraceEvent.setEnabled(false);
                }
                // The harness scans the output for this tag to begin its analysis.
                System.out.println(perfTagAnalysisString);
            }
        } catch (Exception ex) {
            // Eat exception here: perf bookkeeping must never fail the test itself.
        }
    }
}
|
package net.openhft.chronicle.queue.impl.single;
import net.openhft.chronicle.queue.ChronicleQueue;
import net.openhft.chronicle.queue.ChronicleQueueTestBase;
import net.openhft.chronicle.queue.ExcerptAppender;
import net.openhft.chronicle.queue.ExcerptTailer;
import net.openhft.chronicle.queue.impl.async.AsyncChronicleQueueBuilder;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
public class AsyncChronicleQueueTest extends ChronicleQueueTestBase {

    @Ignore
    @Test
    public void testAppendAndRead() throws IOException {
        final ChronicleQueue underlying = SingleChronicleQueueBuilder.text(getTmpDir()).build();
        final ChronicleQueue asyncQueue = new AsyncChronicleQueueBuilder(underlying).build();

        // Write 20 documents through the async wrapper.
        final ExcerptAppender appender = asyncQueue.createAppender();
        for (int value = 0; value < 20; value++) {
            final int n = value;
            appender.writeDocument(w -> w.write(TestKey.test).int32(n));
        }

        // Read them back from the underlying queue, retrying until each document is visible
        // (the async wrapper flushes in the background).
        final ExcerptTailer tailer = underlying.createTailer();
        int read = 0;
        while (read < 20) {
            if (tailer.readDocument(r -> LOGGER.info(">>> {}", r.read(TestKey.test).int32()))) {
                read++;
            }
        }
    }
}
|
package org.sagebionetworks.bridge.models;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.bridge.dynamodb.DynamoInitializer;
import org.sagebionetworks.bridge.exceptions.BridgeServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
/**
 * Parsed representation of a Bridge client's User-Agent header. Instances are immutable;
 * headers that do not match the expected formats map to {@link #UNKNOWN_CLIENT}.
 */
public final class ClientInfo {

    private static final Logger logger = LoggerFactory.getLogger(ClientInfo.class);

    /**
     * A cache of ClientInfo objects that have already been parsed from user agent strings.
     * We're using this, rather than ConcurrentHashMap, because external clients submit this string,
     * and thus could create an infinite number of them, starving the server. The cache will protect
     * against this with its size limit.
     */
    private static final LoadingCache<String, ClientInfo> userAgents = CacheBuilder.newBuilder()
            .maximumSize(500)
            .build(new CacheLoader<String, ClientInfo>() {
                @Override
                public ClientInfo load(String userAgent) throws Exception {
                    return ClientInfo.parseUserAgentString(userAgent);
                }
            });

    /**
     * A User-Agent string that does not follow our format is simply an unknown
     * client, and no filtering will be done for such a client. It is represented with
     * a null object that is the ClientInfo object with all null fields. It is still
     * logged as we find it in the request.
     */
    public static final ClientInfo UNKNOWN_CLIENT = new ClientInfo.Builder().build();

    /**
     * For example, "Unknown Client/14 BridgeJavaSDK/10".
     */
    private static final Pattern SHORT_STRING = Pattern.compile("^([^/]+)\\/(\\d+)\\s([^/\\(]*)\\/(\\d+)($)");

    /**
     * For example, "Asthma/26 (Unknown iPhone; iPhone OS 9.1) BridgeSDK/4".
     */
    private static final Pattern LONG_STRING = Pattern
            .compile("^([^/]+)\\/(\\d+)\\s\\(([^;]+);([^\\)]*)\\)\\s([^/]*)\\/(\\d+)($)");

    private final String appName;
    private final Integer appVersion;
    private final String osName;
    private final String osVersion;
    private final String sdkName;
    private final Integer sdkVersion;

    private ClientInfo(String appName, Integer appVersion, String osName, String osVersion, String sdkName,
            Integer sdkVersion) {
        this.appName = appName;
        this.appVersion = appVersion;
        this.osName = osName;
        this.osVersion = osVersion;
        this.sdkName = sdkName;
        this.sdkVersion = sdkVersion;
    }

    public String getAppName() {
        return appName;
    }

    public Integer getAppVersion() {
        return appVersion;
    }

    public String getOsName() {
        return osName;
    }

    public String getOsVersion() {
        return osVersion;
    }

    public String getSdkName() {
        return sdkName;
    }

    public Integer getSdkVersion() {
        return sdkVersion;
    }

    /**
     * Returns true when this client's declared app version falls within the given inclusive
     * range. Either bound may be null (open-ended); a client with no declared version
     * matches anything.
     */
    public boolean isTargetedAppVersion(Integer minValue, Integer maxValue) {
        // If there's no declared client version, it matches anything.
        if (appVersion != null) {
            // Otherwise we can't be outside of either range boundary if the boundary
            // is declared.
            if ((minValue != null && appVersion.intValue() < minValue.intValue())
                    || (maxValue != null && appVersion.intValue() > maxValue.intValue())) {
                return false;
            }
        }
        return true;
    }

    @Override
    public int hashCode() {
        // Objects.hash() uses the same 31-based accumulation (Arrays.hashCode) as the
        // previous manual implementation, so hash values are unchanged.
        return Objects.hash(appName, appVersion, osName, osVersion, sdkName, sdkVersion);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        ClientInfo other = (ClientInfo) obj;
        return Objects.equals(appName, other.appName) && Objects.equals(appVersion, other.appVersion)
                && Objects.equals(osName, other.osName) && Objects.equals(osVersion, other.osVersion)
                && Objects.equals(sdkName, other.sdkName) && Objects.equals(sdkVersion, other.sdkVersion);
    }

    @Override
    public String toString() {
        return "ClientInfo [appName=" + appName + ", appVersion=" + appVersion + ", osName=" + osName + ", osVersion="
                + osVersion + ", sdkName=" + sdkName + ", sdkVersion=" + sdkVersion + "]";
    }

    /** Builder for {@link ClientInfo}; all fields are optional. */
    static class Builder {
        private String appName;
        private Integer appVersion;
        private String osName;
        private String osVersion;
        private String sdkName;
        private Integer sdkVersion;

        public Builder withAppName(String appName) {
            this.appName = appName;
            return this;
        }
        public Builder withAppVersion(Integer appVersion) {
            this.appVersion = appVersion;
            return this;
        }
        public Builder withOsName(String osName) {
            this.osName = osName;
            return this;
        }
        public Builder withOsVersion(String osVersion) {
            this.osVersion = osVersion;
            return this;
        }
        public Builder withSdkName(String sdkName) {
            this.sdkName = sdkName;
            return this;
        }
        public Builder withSdkVersion(Integer sdkVersion) {
            this.sdkVersion = sdkVersion;
            return this;
        }
        /**
         * It's valid to have a client info object with no fields, if the
         * User-Agent header is not in our prescribed format.
         */
        public ClientInfo build() {
            return new ClientInfo(appName, appVersion, osName, osVersion, sdkName, sdkVersion);
        }
    }

    /**
     * Get a ClientInfo object given a User-Agent header string. These values are cached and
     * headers that are not in the prescribed format return an empty client info object.
     * @param userAgent raw User-Agent header; may be null or blank
     * @return the cached parsed value, or {@link #UNKNOWN_CLIENT}
     */
    public static ClientInfo fromUserAgentCache(String userAgent) {
        if (!StringUtils.isBlank(userAgent)) {
            try {
                return userAgents.get(userAgent);
            } catch (ExecutionException e) {
                // This should not happen, the CacheLoader doesn't throw checked exceptions.
                // Log it and return UNKNOWN_CLIENT.
                logger.error(e.getMessage(), e);
            }
        }
        return UNKNOWN_CLIENT;
    }

    /** Parses a User-Agent string, trying the long format first, then the short one. */
    static ClientInfo parseUserAgentString(String ua) {
        ClientInfo info = UNKNOWN_CLIENT;
        if (!StringUtils.isBlank(ua)) {
            info = parseLongUserAgent(ua);
            if (info == UNKNOWN_CLIENT) {
                info = parseShortUserAgent(ua);
            }
        }
        return info;
    }

    private static ClientInfo parseLongUserAgent(String ua) {
        Matcher matcher = LONG_STRING.matcher(ua);
        if (matcher.matches()) {
            try {
                return new ClientInfo.Builder()
                        .withAppName(matcher.group(1).trim())
                        .withAppVersion(Integer.parseInt(matcher.group(2).trim()))
                        .withOsName(matcher.group(3).trim())
                        .withOsVersion(matcher.group(4).trim())
                        .withSdkName(matcher.group(5).trim())
                        .withSdkVersion(Integer.parseInt(matcher.group(6).trim())).build();
            } catch (NumberFormatException e) {
                // FIX: a hostile header with a digit run too large for an int (e.g.
                // "App/99999999999999999999 (...) SDK/1") previously threw an unchecked
                // NumberFormatException out of the cache loader, which is NOT caught by
                // the catch(ExecutionException) in fromUserAgentCache and so crashed the
                // request. Treat it as an unparseable header instead.
                return UNKNOWN_CLIENT;
            }
        }
        return UNKNOWN_CLIENT;
    }

    private static ClientInfo parseShortUserAgent(String ua) {
        Matcher matcher = SHORT_STRING.matcher(ua);
        if (matcher.matches()) {
            try {
                return new ClientInfo.Builder()
                        .withAppName(matcher.group(1).trim())
                        .withAppVersion(Integer.parseInt(matcher.group(2).trim()))
                        .withSdkName(matcher.group(3).trim())
                        .withSdkVersion(Integer.parseInt(matcher.group(4).trim())).build();
            } catch (NumberFormatException e) {
                // See parseLongUserAgent: out-of-int-range versions are unparseable headers.
                return UNKNOWN_CLIENT;
            }
        }
        return UNKNOWN_CLIENT;
    }
}
|
package io.github.greyp9.arwo.core.table.html;
import io.github.greyp9.arwo.core.locus.Locus;
import io.github.greyp9.arwo.core.page.Page;
import io.github.greyp9.arwo.core.table.baseline.BaselineTable;
import io.github.greyp9.arwo.core.table.model.Table;
import io.github.greyp9.arwo.core.table.model.TableContext;
import io.github.greyp9.arwo.core.table.row.RowSet;
import io.github.greyp9.arwo.core.table.state.ViewState;
import org.w3c.dom.Element;
import java.io.IOException;
/**
 * Renders a {@link Table} into an HTML element, substituting a baseline-merged view
 * when a stored baseline and a table identity column are both available.
 */
public class TableView {
    private final Table table;
    private final TableContext context;
    private final ViewState viewState;

    public TableView(final Table table, final TableContext context) {
        this.table = table;
        this.context = context;
        this.viewState = context.getViewState();
        // Clamp the persisted page index so it stays within the current row count.
        this.viewState.setPage(Page.Factory.fixPage(viewState.getPage(), table.getRows()));
    }

    public final void addContentTo(final Element html) throws IOException {
        final String baselineID = ((table.getTitle() == null) ? table.getID() : table.getTitle());
        // update baseline (if needed)
        viewState.addBaseline(baselineID, table);
        // Render the plain table unless both a stored baseline and an identity column exist.
        final boolean hasBaseline = viewState.getBaselines().containsKey(baselineID);
        final String identityColumn = table.getMetaData().getIdentity();
        if (!hasBaseline || (identityColumn == null)) {
            final TableViewInner plainView = new TableViewInner(table, context);
            plainView.addContentTo(html);
            return;
        }
        // Merge the current rows against the baseline snapshot by the identity column.
        final RowSet baselineRows = viewState.getBaselines().get(baselineID);
        final BaselineTable baselineTable = new BaselineTable(table, baselineRows);
        final RowSet mergedRows = baselineTable.merge(identityColumn);
        final Table mergedTable = new Table(mergedRows, table.getSorts(), table.getFilters(),
                table.getTitle(), table.getSummary());
        // Label the footer with the locus-formatted baseline capture date.
        final Locus locus = context.getLocus();
        final String baselineDate = locus.toString(baselineRows.getDate());
        mergedTable.getProperties().setProperty(Table.Const.FOOTER_R, baselineDate);
        final TableViewInner mergedView = new TableViewInner(mergedTable, context);
        mergedView.addContentTo(html);
    }
}
|
package edu.duke.cabig.c3pr.web.registration.tabs;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.validation.Errors;
import org.springframework.web.util.WebUtils;
import edu.duke.cabig.c3pr.constants.ConsentRequired;
import edu.duke.cabig.c3pr.constants.ICD9DiseaseSiteCodeDepth;
import edu.duke.cabig.c3pr.constants.RegistrationWorkFlowStatus;
import edu.duke.cabig.c3pr.dao.ICD9DiseaseSiteDao;
import edu.duke.cabig.c3pr.domain.ICD9DiseaseSite;
import edu.duke.cabig.c3pr.domain.StudyInvestigator;
import edu.duke.cabig.c3pr.domain.StudySiteStudyVersion;
import edu.duke.cabig.c3pr.domain.StudySubject;
import edu.duke.cabig.c3pr.domain.StudySubjectConsentVersion;
import edu.duke.cabig.c3pr.domain.StudyVersion;
import edu.duke.cabig.c3pr.exception.C3PRCodedRuntimeException;
import edu.duke.cabig.c3pr.utils.Lov;
import edu.duke.cabig.c3pr.utils.StringUtils;
import edu.duke.cabig.c3pr.web.registration.StudySubjectWrapper;
public class EnrollmentDetailsTab extends RegistrationTab<StudySubjectWrapper> {
private ICD9DiseaseSiteDao icd9DiseaseSiteDao;
    /** Injects the DAO used to look up ICD9 disease sites (Spring setter injection). */
    public void setIcd9DiseaseSiteDao(ICD9DiseaseSiteDao icd9DiseaseSiteDao) {
        this.icd9DiseaseSiteDao = icd9DiseaseSiteDao;
    }
    /** Configures the tab's long title, short title, and view name. */
    public EnrollmentDetailsTab() {
        super("Enrollment Details", "Enrollment Details", "registration/reg_registration_details");
    }
    /**
     * Adds the enrollment-details reference data (payment-method LOVs and top-level
     * disease-site categories) on top of the superclass reference data.
     */
    @Override
    public Map referenceData(HttpServletRequest request,
            StudySubjectWrapper command) {
        Map refdata = super.referenceData(request, command);
        // configurationProperty is inherited; its map supplies the "paymentMethods" LOV list.
        Map<String, List<Lov>> configMap = configurationProperty.getMap();
        refdata.put("paymentMethods", configMap.get("paymentMethods"));
        refdata.put("diseaseSiteCategories", getDiseaseSiteCategories());
        return refdata;
    }
public List<ICD9DiseaseSite> getDiseaseSiteCategories(){
List<ICD9DiseaseSite> icd9DiseaseSites = new ArrayList<ICD9DiseaseSite>();
icd9DiseaseSites.addAll(icd9DiseaseSiteDao.getByLevel(ICD9DiseaseSiteCodeDepth.LEVEL1));
return icd9DiseaseSites;
}
/**
 * Post-processes the submitted registration form: clears stale session
 * attributes from a prior submit, synchronizes the scheduled-epoch start date
 * with the registration date for first-time enrollment, optionally migrates
 * the subject to a different study version, resolves the treating physician,
 * and verifies each signed consent date is covered by the selected study version.
 *
 * @param request the current HTTP request
 * @param command the form-backing wrapper around the StudySubject
 * @param errors  Spring errors holder used to report validation failures
 */
@Override
public void postProcess(HttpServletRequest request, StudySubjectWrapper command, Errors errors) {
StudySubjectWrapper wrapper = (StudySubjectWrapper) command ;
StudySubject studySubject = wrapper.getStudySubject();
// Drop study-version / enrollment flags left over from a previous submit.
if(request.getSession().getAttribute("studyVersion") !=null ){
request.getSession().removeAttribute("studyVersion");
}
if(request.getSession().getAttribute("canEnroll") !=null ){
request.getSession().removeAttribute("canEnroll");
}
if(studySubject.getRegWorkflowStatus() != RegistrationWorkFlowStatus.ENROLLED && studySubject.getScheduledEpoch().getEpoch().getEnrollmentIndicator()){
studySubject.getScheduledEpoch().setStartDate(studySubject.getStartDate());
}
// Study-version migration path: parse the consent date, switch versions, rebuild
// the command object, and return early (the normal flow below is skipped).
if(WebUtils.hasSubmitParameter(request, "updateStudyVersion") && request.getParameter("updateStudyVersion").equals("true")){
Date consentSignedDate = null;
try {
consentSignedDate = new SimpleDateFormat("MM/dd/yyyy").parse(request.getParameter("consentSignedDate"));
request.setAttribute("consentSignedDate", request.getParameter("consentSignedDate"));
} catch (ParseException e) {
throw new RuntimeException("Invalid Submit. Registration Date is invalid");
}
try{
studySubject.changeStudyVersion(consentSignedDate);
} catch(C3PRCodedRuntimeException ex){
// Code 101: target version has no epoch matching the current one.
if(ex.getExceptionCode()==101){
errors.reject("tempProperty","Unable to find an epoch with same name in the study version");
}
}
getRegistrationControllerUtils().buildCommandObject(studySubject);
getRegistrationControllerUtils().addConsents(studySubject);
return;
}
// set the scheduled epoch start date to registration start date for first time enrollment
if(command.getStudySubject().getScheduledEpoch().getEpoch().getEnrollmentIndicator() &&
command.getStudySubject().getRegWorkflowStatus() != RegistrationWorkFlowStatus.ENROLLED){
command.getStudySubject().getScheduledEpoch().setStartDate(command.getStudySubject().getStartDate());
}
if(!StringUtils.isBlank(request.getParameter("treatingPhysicianInternal"))){
for(StudyInvestigator studyInvestigator : studySubject.getStudySite().getStudyInvestigators()){
if(studyInvestigator.getId()==Integer.parseInt(request.getParameter("treatingPhysicianInternal"))){
studySubject.setTreatingPhysician(studyInvestigator);
break;
}
}
}
// Clear the placeholder text if the user never typed an "other" disease site.
if(command.getStudySubject().getDiseaseHistory() != null){
if(StringUtils.equals(command.getStudySubject().getDiseaseHistory().getOtherPrimaryDiseaseSiteCode(), "(Begin typing here)")){
command.getStudySubject().getDiseaseHistory().setOtherPrimaryDiseaseSiteCode("");
}
}
StudySiteStudyVersion studySiteStudyVersion = ((StudySubjectWrapper)command).getStudySubject().getStudySubjectStudyVersion().getStudySiteStudyVersion();
for(StudySubjectConsentVersion studySubjectConsentVersion : command.getStudySubject().getStudySubjectStudyVersion().getStudySubjectConsentVersions()){
// FIX: was `getInformedConsentSignedDateStr() != ""` — a String reference
// comparison that is effectively always true; isBlank() gives the intended
// "date string was actually entered" semantics.
if(!StringUtils.isBlank(studySubjectConsentVersion.getInformedConsentSignedDateStr())){
if (!studySiteStudyVersion.getStudySite().canEnroll(studySiteStudyVersion.getStudyVersion() , studySubjectConsentVersion.getInformedConsentSignedDate())){
request.getSession().setAttribute("canEnroll",false);
StudyVersion studyVersion = studySiteStudyVersion.getStudySite().getActiveStudyVersion(studySubjectConsentVersion.getInformedConsentSignedDate());
request.getSession().setAttribute("studyVersion",studyVersion);
errors.reject("tempProperty","Informed consent signed date does not correspond to the selected study version");
break;
}
}
}
}
/**
 * Validates the registration form: rejects future registration / consent dates,
 * requires registration to be on or after the consent signed date, and enforces
 * the study's consent requirement (at least one signed vs. all signed).
 *
 * @param command the form-backing wrapper around the StudySubject
 * @param errors  Spring errors holder used to report validation failures
 */
@Override
public void validate(StudySubjectWrapper command, Errors errors) {
// Date checks apply only to first-time enrollment on an enrolling epoch.
if(command.getStudySubject().getRegWorkflowStatus() != RegistrationWorkFlowStatus.ENROLLED &&
command.getStudySubject().getScheduledEpoch().getEpoch().getEnrollmentIndicator()) {
Date date = command.getStudySubject().getStartDate();
if(date !=null){
if(date.after(new Date())){
errors.reject("tempProperty", "Registration date cannot be a future date");
}
}
for(StudySubjectConsentVersion studySubjectConsentVersion : command.getStudySubject().getStudySubjectStudyVersion().getStudySubjectConsentVersions()){
if (studySubjectConsentVersion
.getInformedConsentSignedDate() != null) {
if(studySubjectConsentVersion.getInformedConsentSignedDate().after(new Date())){
errors.reject("tempProperty", "Consent signed date cannot be a future date");
}
if(date !=null){
if (date.before(studySubjectConsentVersion.getInformedConsentSignedDate())) {
errors
.reject("studySubject.startDate",
"Registration date cannot be prior to informed consent signed date");
}
}
}
}
}
if(command.getStudySubject().getStudySite().getStudy().getConsentRequired() == ConsentRequired.ONE){
boolean atLeastOneConsentSigned = false;
for(StudySubjectConsentVersion studySubjectConsentVersion : command.getStudySubject().getStudySubjectStudyVersion().getStudySubjectConsentVersions()){
// FIX: was `!= ""` — a String reference comparison that is effectively
// always true; isBlank() correctly detects a missing/empty date string.
if(!StringUtils.isBlank(studySubjectConsentVersion.getInformedConsentSignedDateStr())){
atLeastOneConsentSigned = true;
}
}
if(!atLeastOneConsentSigned){
errors.reject("tempProperty","At least one consent signed date is required.");
}
} else if (command.getStudySubject().getStudySite().getStudy().getConsentRequired() == ConsentRequired.ALL){
boolean allConsentsSigned = true;
for(StudySubjectConsentVersion studySubjectConsentVersion : command.getStudySubject().getStudySubjectStudyVersion().getStudySubjectConsentVersions()){
// FIX: was `== null || == ""` — same reference-comparison bug as above.
if(StringUtils.isBlank(studySubjectConsentVersion.getInformedConsentSignedDateStr())){
allConsentsSigned = false;
}
}
if(!allConsentsSigned){
errors.reject("tempProperty","All consent signed dates are mandatory.");
}
}
}
}
|
package com.codeaffine.extras.jdt.internal.junitstatus;
import static com.google.common.base.Objects.equal;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ControlAdapter;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.events.PaintEvent;
import org.eclipse.swt.events.PaintListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.GC;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Canvas;
import org.eclipse.swt.widgets.Composite;
import com.codeaffine.extras.jdt.internal.junitstatus.TextAnimation.TextAnimationPainter;
/**
 * Custom-drawn progress bar modeled on the JUnit view's bar: a rounded
 * outline filled proportionally to selection/maximum, with a (possibly
 * animated) text label drawn on top.  Created double-buffered and
 * non-focusable; all setters funnel through setValues(), which only
 * redraws when something actually changed.
 */
public class JUnitProgressBar extends Canvas implements TextAnimationPainter {
// Corner radius of the rounded outline and fill.
private static final int ARC_SIZE = 3;
// Preferred size returned by computeSize() when no hint is supplied.
private static final int DEFAULT_WIDTH = 160;
private static final int DEFAULT_HEIGHT = 18;
private final TextAnimation textAnimation;
private String text;
private int textAlignment;
private int maximum;
private int selection;
private Color barColor;
public JUnitProgressBar( Composite parent ) {
super( parent, SWT.NO_BACKGROUND | SWT.DOUBLE_BUFFERED | SWT.NO_FOCUS );
text = "";
textAlignment = SWT.LEFT;
textAnimation = new TextAnimation( this, this );
registerListeners();
}
/**
 * Atomically updates all displayed state; triggers a redraw only when at
 * least one value differs from the current state.
 */
public void setValues( String text, int textAlignment, Color barColor, int selection, int maximum ) {
if( valuesChanged( text, textAlignment, barColor, selection, maximum ) ) {
this.text = text;
this.textAnimation.setText( text );
this.textAlignment = textAlignment;
this.barColor = barColor;
this.selection = selection;
this.maximum = maximum;
redraw();
}
}
public void setMaximum( int maximum ) {
setValues( text, textAlignment, barColor, selection, maximum );
}
public int getMaximum() {
return maximum;
}
public void setSelection( int selection ) {
setValues( text, textAlignment, barColor, selection, maximum );
}
public int getSelection() {
return selection;
}
public void setBarColor( Color barColor ) {
setValues( text, textAlignment, barColor, selection, maximum );
}
public Color getBarColor() {
return barColor;
}
public void setText( String text ) {
setValues( text, textAlignment, barColor, selection, maximum );
}
public String getText() {
return text;
}
public void setTextAlignment( int textAlignment ) {
setValues( text, textAlignment, barColor, selection, maximum );
}
public int getTextAlignment() {
return textAlignment;
}
// TextAnimationPainter callback: the animation ticks by forcing a repaint.
@Override
public void drawText( TextAnimation textAnimation ) {
redraw();
}
@Override
public Point computeSize( int wHint, int hHint, boolean changed ) {
checkWidget();
Point result = new Point( DEFAULT_WIDTH, DEFAULT_HEIGHT );
if( wHint != SWT.DEFAULT ) {
result.x = wHint;
}
if( hHint != SWT.DEFAULT ) {
result.y = hHint;
}
return result;
}
private void registerListeners() {
addControlListener( new ControlAdapter() {
@Override
public void controlResized( ControlEvent event ) {
redraw();
}
} );
addPaintListener( new PaintListener() {
@Override
public void paintControl( PaintEvent event ) {
paint( event.gc );
}
} );
}
// Paint order matters: clear background, outline, fill bar, then text on top.
private void paint( GC gc ) {
gc.fillRectangle( getClientArea() );
drawRectangle( gc );
drawBar( gc );
drawText( gc );
}
// Draws the rounded outline; skipped while maximum == 0 (nothing to show yet).
private void drawRectangle( GC gc ) {
if( maximum > 0 ) {
Rectangle clientArea = getClientArea();
int x = clientArea.x;
int y = clientArea.y;
int width = clientArea.width - 1;
int height = clientArea.height - 1 - 1;
gc.setAlpha( 255 );
gc.setForeground( getDisplay().getSystemColor( SWT.COLOR_WIDGET_NORMAL_SHADOW ) );
gc.drawRoundRectangle( x, y, width, height, ARC_SIZE, ARC_SIZE );
}
}
// Fills the bar slightly translucent (alpha 200) inside the outline.
private void drawBar( GC gc ) {
gc.setAlpha( 200 );
if( barColor != null ) {
gc.setBackground( barColor );
}
Rectangle clientArea = getClientArea();
int barWidth = Math.min( clientArea.width - 2, getBarWidth() );
gc.fillRoundRectangle( 1, 1, barWidth, clientArea.height - 2 - 1, ARC_SIZE, ARC_SIZE );
}
// NOTE(review): the horizontal centering offset is computed from the static
// `text` extent while the *animated* text is what gets drawn — presumably
// intentional so the label doesn't jitter as the animation changes; confirm.
private void drawText( GC gc ) {
gc.setAlpha( 255 );
Rectangle rect = getClientArea();
Rectangle clientArea = new Rectangle( rect.x + 1, rect.y + 1, rect.width - 2, rect.height - 2 );
Point textSize = gc.textExtent( text );
int x = 3;
if( textAlignment == SWT.CENTER ) {
x = ( clientArea.width - textSize.x ) / 2;
}
int y = ( clientArea.height - textSize.y ) / 2 + 1;
gc.setForeground( getDisplay().getSystemColor( SWT.COLOR_WIDGET_FOREGROUND ) );
gc.drawText( textAnimation.getAnimatedText(), x, y, SWT.DRAW_TRANSPARENT );
}
// Pixel width of the filled portion, scaled to the client area and clamped
// to [0, width - 2]; zero when maximum is unset.
int getBarWidth() {
int result = selection;
Rectangle clientArea = getClientArea();
if( maximum > 0 ) {
if( clientArea.width != 0 ) {
result = Math.max( 0, selection * ( clientArea.width - 2 ) / maximum );
}
} else {
result = 0;
}
return Math.min( clientArea.width - 2, result );
}
// True if any displayed value differs from the current state.
private boolean valuesChanged( String text,
int textAlignment,
Color barColor,
int selection,
int maximum )
{
return !equal( this.text, text )
|| this.textAlignment != textAlignment
|| !equal( this.barColor, barColor )
|| this.selection != selection
|| this.maximum != maximum;
}
}
|
package org.cometd.server.transport;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.cometd.bayeux.Channel;
import org.cometd.bayeux.Message;
import org.cometd.bayeux.server.ServerMessage;
import org.cometd.server.AbstractServerTransport;
import org.cometd.server.BayeuxServerImpl;
import org.cometd.server.ServerSessionImpl;
import org.eclipse.jetty.continuation.Continuation;
import org.eclipse.jetty.continuation.ContinuationListener;
import org.eclipse.jetty.continuation.ContinuationSupport;
import org.eclipse.jetty.util.log.Log;
/** Abstract Long Polling Transport.
* <p>
* Transports based on this class can be configured with servlet init parameters:<dl>
* <dt>browserId</dt><dd>The Cookie name used to save a browser ID.</dd>
* <dt>maxSessionsPerBrowser</dt><dd>The maximum number of long polling sessions allowed per browser.</dd>
* <dt>multiSessionInterval</dt><dd>The polling interval to use once max session per browser is exceeded.</dd>
* <dt>autoBatch</dt><dd>If true a batch will be automatically created to span the handling of messages received from a session.</dd>
* </dl>
*
*/
public abstract class LongPollingTransport extends HttpTransport
{
public final static String PREFIX="long-polling";
public final static String BROWSER_ID_OPTION="browserId";
public final static String MAX_SESSIONS_PER_BROWSER_OPTION="maxSessionsPerBrowser";
public final static String MULTI_SESSION_INTERVAL_OPTION="multiSessionInterval";
public final static String AUTOBATCH_OPTION="autoBatch";
// Count of outstanding long polls per browser ID (cookie value).
private final ConcurrentHashMap<String, AtomicInteger> _browserMap=new ConcurrentHashMap<String, AtomicInteger>();
protected String _browserId="BAYEUX_BROWSER";
private int _maxSessionsPerBrowser=1;
private long _multiSessionInterval=2000;
private boolean _autoBatch=true;
protected LongPollingTransport(BayeuxServerImpl bayeux,String name)
{
super(bayeux,name);
setOptionPrefix(PREFIX);
// Publish the defaults so they appear as configurable options.
setOption(BROWSER_ID_OPTION,_browserId);
setOption(MAX_SESSIONS_PER_BROWSER_OPTION,_maxSessionsPerBrowser);
setOption(MULTI_SESSION_INTERVAL_OPTION,_multiSessionInterval);
setOption(AUTOBATCH_OPTION,_autoBatch);
}
@Override
protected void init()
{
super.init();
// Re-read options, which may have been overridden by servlet init params.
_browserId=getOption(BROWSER_ID_OPTION,_browserId);
_maxSessionsPerBrowser=getOption(MAX_SESSIONS_PER_BROWSER_OPTION,_maxSessionsPerBrowser);
_multiSessionInterval=getOption(MULTI_SESSION_INTERVAL_OPTION,_multiSessionInterval);
_autoBatch=getOption(AUTOBATCH_OPTION,_autoBatch);
}
/**
 * Returns the browser ID from the request's cookie, or generates a new one
 * (from remote port, server random and timestamp) and sets it as a
 * session-lifetime cookie on the response.
 */
protected String getBrowserId(HttpServletRequest request, HttpServletResponse response)
{
Cookie[] cookies=request.getCookies();
if (cookies != null)
{
for (Cookie cookie : cookies)
{
if (_browserId.equals(cookie.getName()))
return cookie.getValue();
}
}
String browser_id=Long.toHexString(request.getRemotePort()) + Long.toString(getBayeux().randomLong(),36) + Long.toString(System.currentTimeMillis(),36)
+ Long.toString(request.getRemotePort(),36);
Cookie cookie=new Cookie(_browserId,browser_id);
cookie.setPath("/");
cookie.setMaxAge(-1);
response.addCookie(cookie);
return browser_id;
}
/**
 * Registers a long poll for the given browser; returns true if the browser
 * is still under its _maxSessionsPerBrowser quota (negative = unlimited,
 * zero = never allowed).
 */
protected boolean addBrowserSession(String browserId, String clientId)
{
if (_maxSessionsPerBrowser < 0)
return true;
if (_maxSessionsPerBrowser == 0)
return false;
AtomicInteger sessions = _browserMap.get(browserId);
if (sessions == null)
{
AtomicInteger newSessions = new AtomicInteger();
sessions = _browserMap.putIfAbsent(browserId, newSessions);
if (sessions == null)
sessions = newSessions;
}
// Synchronization is necessary to avoid modifying
// a structure that has been removed from the map
synchronized (sessions)
{
// The entry could have been removed concurrently by removeBrowserSession()
if (!_browserMap.containsKey(browserId))
_browserMap.put(browserId, sessions);
// TODO, the maxSessionsPerBrowser should be parametrized on user-agent
if (sessions.getAndIncrement() < _maxSessionsPerBrowser)
{
return true;
}
return false;
}
}
/**
 * Decrements the browser's long-poll count; removes the map entry (and
 * returns true) when the count reaches zero.
 */
protected boolean removeBrowserSession(String browserId, String clientId)
{
AtomicInteger sessions = _browserMap.get(browserId);
if (sessions != null)
{
// Synchronization is necessary to avoid modifying the
// structure after the if statement but before the remove call
synchronized (sessions)
{
if (sessions.decrementAndGet() == 0)
{
_browserMap.remove(browserId, sessions);
return true;
}
}
}
return false;
}
protected AtomicInteger getBrowserSessions(String browserId)
{
return _browserMap.get(browserId);
}
/**
 * Main entry point.  Either processes a fresh batch of messages from the
 * request body or, if a LongPollScheduler attribute is present, completes a
 * long poll that was previously suspended and has now been resumed.
 */
@Override
public void handle(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException
{
// is this a resumed connect?
LongPollScheduler scheduler=(LongPollScheduler)request.getAttribute("cometd.scheduler");
if (scheduler==null)
{
// No - process messages
// remember if we start a batch
boolean batch=false;
// Don't know the session until first message or handshake response.
ServerSessionImpl session=null;
boolean connect=false;
try
{
ServerMessage.Mutable[] messages = parseMessages(request);
if (messages==null)
return;
PrintWriter writer=null;
// for each message
for (ServerMessage.Mutable message : messages)
{
// Is this a connect?
connect = Channel.META_CONNECT.equals(message.getChannel());
// Get the session from the message
if (session==null)
{
session=(ServerSessionImpl)getBayeux().getSession(message.getClientId());
if (_autoBatch && !batch && session!=null && !connect)
{
// start a batch to group all resulting messages into a single response.
batch=true;
session.startBatch();
}
}
if (connect && session!=null)
{
// cancel previous scheduler to cancel any prior waiting long poll
// this should also dec the Browser ID
session.setScheduler(null);
}
// remember the connected status
boolean was_connected=session!=null && session.isConnected();
// handle the message
// the actual reply is return from the call, but other messages may
// also be queued on the session.
ServerMessage reply = getBayeux().handle(session,message);
// Do we have a reply
if (reply!=null)
{
if (session==null)
{
// This must be a handshake
// extract a session from the reply (if we don't already know it
session=(ServerSessionImpl)getBayeux().getSession(reply.getClientId());
// get the user agent while we are at it.
if (session!=null)
session.setUserAgent(request.getHeader("User-Agent"));
}
else
{
// If this is a connect or we can send messages with any response
if (connect || !(isMetaConnectDeliveryOnly()||session.isMetaConnectDeliveryOnly()))
{
// send the queued messages
writer=sendQueue(request,response,session,writer);
}
// special handling for connect
if (connect)
{
long timeout = session.calculateTimeout(getTimeout());
// Should we suspend?
// If the writer is non null, we have already started sending a response, so we should not suspend
if(timeout>0 && was_connected && writer==null && reply.isSuccessful() && session.isQueueEmpty())
{
// If we don't have too many long polls from this browser
String browserId=getBrowserId(request,response);
if (addBrowserSession(browserId, session.getId()))
{
// suspend and wait for messages
Continuation continuation = ContinuationSupport.getContinuation(request);
continuation.setTimeout(timeout);
continuation.suspend();
scheduler=new LongPollScheduler(session,continuation,reply,browserId);
session.setScheduler(scheduler);
request.setAttribute("cometd.scheduler",scheduler);
// reply=null defers the reply until the poll is resumed.
reply=null;
}
else
{
// Advise multiple clients from same browser
Map<String, Object> advice = reply.asMutable().getAdvice(true);
advice.put("multiple-clients", true);
if (_multiSessionInterval > 0)
{
advice.put(Message.RECONNECT_FIELD, Message.RECONNECT_RETRY_VALUE);
advice.put(Message.INTERVAL_FIELD, _multiSessionInterval);
}
else
{
advice.put(Message.RECONNECT_FIELD, Message.RECONNECT_NONE_VALUE);
}
session.reAdvise();
}
}
else if (session.isConnected())
{
session.startIntervalTimeout();
}
}
}
// If the reply has not been otherwise handled, send it
if (reply!=null)
{
reply=getBayeux().extendReply(session,reply);
if (reply!=null)
writer=send(request,response,writer, reply);
}
}
// disassociate the reply
message.setAssociated(null);
}
if (writer!=null)
complete(writer);
}
finally
{
// if we started a batch - end it now
if (batch)
{
boolean ended=session.endBatch();
// flush session if not done by the batch
// since some browsers well order script gets
if (!ended && isAlwaysFlushingAfterHandle())
session.flush();
}
else if (session!=null && !connect && isAlwaysFlushingAfterHandle())
session.flush();
}
}
else
{
// Get the resumed session
ServerSessionImpl session=scheduler.getSession();
if (session.isConnected())
session.startIntervalTimeout();
// Send the message queue
PrintWriter writer=sendQueue(request,response,session,null);
// send the connect reply
ServerMessage reply=scheduler.getReply();
reply=getBayeux().extendReply(session,reply);
writer=send(request,response,writer, reply);
complete(writer);
}
}
/** Drains the session's queued messages into the response writer. */
private PrintWriter sendQueue(HttpServletRequest request, HttpServletResponse response,ServerSessionImpl session, PrintWriter writer)
throws IOException
{
final List<ServerMessage> queue = session.takeQueue();
for (ServerMessage m:queue)
writer=send(request,response,writer, m);
return writer;
}
/**
 * @return true if the transport always flushes at the end of a call to {@link #handle(HttpServletRequest, HttpServletResponse)}.
 */
abstract protected boolean isAlwaysFlushingAfterHandle();
abstract protected PrintWriter send(HttpServletRequest request,HttpServletResponse response,PrintWriter writer, ServerMessage message) throws IOException;
abstract protected void complete(PrintWriter writer) throws IOException;
/**
 * One-shot scheduler bound to a suspended continuation: resumes the request
 * when messages arrive, cancels with a 408 when the poll is superseded, and
 * releases the browser's long-poll slot exactly once via decBrowserId().
 */
private class LongPollScheduler implements AbstractServerTransport.OneTimeScheduler, ContinuationListener
{
private final ServerSessionImpl _session;
private final Continuation _continuation;
private final ServerMessage _reply;
// Nulled once the browser slot has been released (guards double-decrement).
private String _browserId;
public LongPollScheduler(ServerSessionImpl session, Continuation continuation, ServerMessage reply,String browserId)
{
_session = session;
_continuation = continuation;
_continuation.addContinuationListener(this);
_reply = reply;
_browserId=browserId;
}
public void cancel()
{
if (_continuation!=null && _continuation.isSuspended() && !_continuation.isExpired())
{
try
{
decBrowserId();
HttpServletResponse response = (HttpServletResponse)_continuation.getServletResponse();
response.sendError(HttpServletResponse.SC_REQUEST_TIMEOUT);
}
catch(IOException e)
{
Log.ignore(e);
}
try
{
_continuation.complete();
}
catch(Exception e)
{
Log.ignore(e);
}
}
}
public void schedule()
{
decBrowserId();
_continuation.resume();
}
public ServerSessionImpl getSession()
{
return _session;
}
public Continuation getContinuation()
{
return _continuation;
}
public ServerMessage getReply()
{
return _reply;
}
public void onComplete(Continuation continuation)
{
decBrowserId();
}
public void onTimeout(Continuation continuation)
{
_session.setScheduler(null);
}
// Idempotent release of this poll's browser-session slot.
private void decBrowserId()
{
synchronized (this)
{
if (_browserId!=null)
{
LongPollingTransport.this.removeBrowserSession(_browserId, _session.getId());
_browserId=null;
}
}
}
}
}
|
package attackontinytim.barquest;
import android.database.Cursor;
import android.os.Parcelable;
import android.os.Parcel;
import java.util.Random;
import attackontinytim.barquest.Database.Monster;
import attackontinytim.barquest.Database.QuestRepo;
/**
 * A slayer quest: kill {@code goal} monsters matching the quest's target
 * (either a rarity tier or a specific monster name) to earn XP, money and/or
 * an item reward.  Persisted via QuestRepo; one constructor hydrates a Quest
 * from a database Cursor row.
 */
public class Quest {
/* VARIABLES */
/**
 * Common variables for all Quest objects
 */
private int id; // Quest ID
private String name; // Name of the quest
private String description; // Text description of the quest
private int goal; // Total number of monsters to slay
private int progress; // Number of monsters slain for the quest
private boolean completed; // Boolean indicating if the quest has been completed
private int XP; // XP amount to give as a reward can be 0
private double Money; // Money to give as a reward can be 0
private String itemName; // Name of the item the give as a reward, can be an empty string
private String QuestType; // Type of quest can be by Rarity or Monster
private String QuestTarget; // Target is the targeted rarity like Common, or a specific monster name
/* CONSTRUCTORS */
// Default constructor: a completed placeholder quest with no rewards.
public Quest() {
this.id = 0;
this.name = "Default";
this.description = "Default Quest";
this.goal = 0;
this.progress = 0;
this.completed = true;
this.XP = 0;
this.Money = 0;
this.itemName = "";
this.QuestType = "Rarity";
this.QuestTarget = "Common";
}
// Full constructor.
public Quest(int id, String name, String description, int goal, int progress, boolean completed, int XP, double money, String itemName, String questType, String questTarget) {
this.id = id;
this.name = name;
this.description = description;
this.goal = goal;
this.progress = progress;
this.completed = completed;
this.XP = XP;
this.Money = money;
this.itemName = itemName;
this.QuestType = questType;
this.QuestTarget = questTarget;
}
// Copy constructor.
public Quest(Quest quest) {
this.id = quest.getId();
this.name = quest.getName();
this.description = quest.getDescription();
this.goal = quest.getGoal();
this.progress = quest.getProgress();
this.completed = quest.isCompleted();
this.XP = quest.getXP();
this.Money = quest.getMoney();
this.itemName = quest.getItemName();
this.QuestType = quest.getQuestType();
this.QuestTarget = quest.getQuestTarget();
}
// Hydrate from a database row; column order must match the quests table schema.
public Quest(Cursor cursor) {
this.id = cursor.getInt(0);
this.name = cursor.getString(1);
this.description = cursor.getString(2);
this.goal = cursor.getInt(3);
this.progress = cursor.getInt(4);
this.completed = cursor.getInt(5) > 0; // Check for boolean true or false
this.XP = cursor.getInt(6);
// FIX: Money is a double, but the original read the column with getInt(7),
// silently truncating any fractional reward amount.
this.Money = cursor.getDouble(7);
this.itemName = cursor.getString(8);
this.QuestType = cursor.getString(9);
this.QuestTarget = cursor.getString(10);
}
/* GET-METHODS */
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public int getGoal() {
return goal;
}
public void setGoal(int goal) {
this.goal = goal;
}
public int getProgress() {
return progress;
}
public void setProgress(int progress) {
this.progress = progress;
}
public boolean isCompleted() {
return completed;
}
public void setCompleted(boolean completed) {
this.completed = completed;
}
public int getXP() {
return XP;
}
public void setXP(int XP) {
this.XP = XP;
}
public double getMoney() {
return Money;
}
public void setMoney(double money) {
Money = money;
}
public String getItemName() {
return itemName;
}
public void setItemName(String itemName) {
this.itemName = itemName;
}
public String getQuestType() {
return QuestType;
}
public void setQuestType(String questType) {
QuestType = questType;
}
public String getQuestTarget() {
return QuestTarget;
}
public void setQuestTarget(String questTarget) {
QuestTarget = questTarget;
}
public String getName() {
return name;
}
/**
 * Takes a monster after completing a battle and updates quest progress based on the monster
 * Currently checks for the monster rarity and the monster name
 * @param mon Monster that was killed
 */
public void updateQuestProgress(Monster mon) {
if (this.getQuestType().equalsIgnoreCase("Rarity")) {
if (this.getQuestTarget().equalsIgnoreCase(mon.getRarity())) {
this.setProgress(this.getProgress() + 1);
}
} else if (this.getQuestType().equalsIgnoreCase("Monster")) {
if (this.getQuestTarget().equalsIgnoreCase(mon.getName())) {
this.setProgress(this.getProgress() + 1);
}
}
// Persist the updated progress immediately.
QuestRepo.updateQuest(this);
}
}
|
/**
* This class handles communications with a client.
*/
package au.com.darkside.XServer;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import java.util.Vector;
import android.util.Log;
import au.com.darkside.XServer.Xext.Extensions;
/**
* @author Matthew Kwan
*
* This class handles communications with a client.
*/
public class Client extends Thread {
/**
 * Strategy for closing the client's underlying transport when the connection
 * shuts down.  NOP is used when the Client was built from raw streams and has
 * no socket of its own to close.
 */
private interface CloseSocketProc {
/** Implementation that does nothing on close. */
public static class Nop implements CloseSocketProc {
public void close() throws IOException {
}
}
/** Shared do-nothing instance. */
public static final CloseSocketProc NOP = new Nop();
/** Close the underlying transport. */
public void close() throws IOException;
}
/** CloseSocketProc that closes a real communications socket. */
private static class TrueCloseSocketProc implements CloseSocketProc {
// final: assigned once in the constructor and never reassigned.
private final Socket mSocket;
public TrueCloseSocketProc(Socket socket) {
mSocket = socket;
}
@Override
public void close() throws IOException {
mSocket.close();
}
}
public static final int Destroy = 0;
public static final int RetainPermanent = 1;
public static final int RetainTemporary = 2;
private static final String LOG_TAG = "X Server";
private final XServer _xServer;
private final CloseSocketProc mCloseSocketProc;
private final InputOutput _inputOutput;
private final int _resourceIdBase;
private final int _resourceIdMask;
private final Vector<Resource> _resources;
private int _sequenceNumber = 0;
private boolean _closeConnection = false;
private boolean _isConnected = true;
private int _closeDownMode = Destroy;
/**
 * Constructor for a socket-backed client; the socket is closed when the
 * client shuts down.
 *
 * @param xserver The X Server.
 * @param socket The communications socket.
 * @param resourceIdBase The lowest resource ID the client can use.
 * @param resourceIdMask The range of resource IDs the client can use.
 * @throws IOException
 */
public Client (
XServer xserver,
Socket socket,
int resourceIdBase,
int resourceIdMask
) throws IOException {
this(xserver, socket, new InputOutput(socket), resourceIdBase,
resourceIdMask);
}
/**
 * Constructor for a stream-backed client (no socket); on shutdown only the
 * InputOutput is closed.
 *
 * @param xserver The X Server.
 * @param in Stream the client's requests are read from.
 * @param out Stream replies/events are written to.
 * @param resourceIdBase The lowest resource ID the client can use.
 * @param resourceIdMask The range of resource IDs the client can use.
 * @throws IOException
 */
public Client(XServer xserver, InputStream in, OutputStream out,
int resourceIdBase, int resourceIdMask) throws IOException {
this(xserver, null, new InputOutput(in, out), resourceIdBase,
resourceIdMask);
}
/**
 * Primary constructor; the other constructors delegate here.
 *
 * @param xserver The X Server.
 * @param socket The communications socket, or null if stream-backed.
 * @param conn The input/output handle wrapping the transport.
 * @param resourceIdBase The lowest resource ID the client can use.
 * @param resourceIdMask The range of resource IDs the client can use.
 * @throws IOException
 */
public Client(XServer xserver, Socket socket, InputOutput conn,
int resourceIdBase, int resourceIdMask) throws IOException {
_xServer = xserver;
// With no socket, fall back to the do-nothing close strategy.
mCloseSocketProc = socket != null ? new TrueCloseSocketProc(socket)
: CloseSocketProc.NOP;
_inputOutput = conn;
_resourceIdBase = resourceIdBase;
_resourceIdMask = resourceIdMask;
_resources = new Vector<Resource>();
}
/**
 * Get the client's close down mode.
 *
 * @return The client's close down mode: one of Destroy, RetainPermanent
 *         or RetainTemporary.
 */
public int
getCloseDownMode () {
return _closeDownMode;
}
/**
 * Return the input/output handle used to communicate with this client.
 *
 * @return The input/output handle.
 */
public InputOutput
getInputOutput () {
return _inputOutput;
}
/**
 * Get the sequence number of the latest request sent by the client.
 * Incremented once per request processed in processRequest().
 *
 * @return The last-used sequence number.
 */
public int
getSequenceNumber () {
return _sequenceNumber;
}
/**
 * Return whether the client is connected.  Becomes false once close()
 * has run.
 *
 * @return True if the client is connected.
 */
public boolean
isConnected () {
return _isConnected;
}
/**
 * Add to the client's list of resources.  Synchronized because resources
 * may be added and freed from different threads.
 *
 * @param r The resource to add.
 */
public synchronized void
addResource (
Resource r
) {
_resources.add (r);
}
/**
 * Remove a resource from the client's list.
 *
 * @param r The resource to remove.
 */
public synchronized void
freeResource (
Resource r
) {
_resources.remove (r);
}
/**
 * Run the communications thread: perform the connection handshake and
 * request loop, then close down under the server lock.
 */
public void
run () {
Log.i(LOG_TAG, String.format("%s: a new client started.", getName()));
try {
doComms ();
} catch (IOException e) {
// NOTE(review): IOException is swallowed silently — presumably treated
// as a normal client disconnect; confirm before adding logging.
}
// Close under the server lock so resource teardown doesn't race request
// processing for other clients.
synchronized (_xServer) {
close ();
}
}
/**
 * Cancel the communications thread: flag the request loop to stop and
 * close the connection immediately.
 */
public void
cancel () {
_closeConnection = true;
close ();
}
/**
 * Close the communications thread and free resources.  Idempotent: returns
 * immediately if already closed.  Resources are only deleted when the
 * close-down mode is Destroy; Retain* modes keep them alive.
 */
private void
close () {
if (!_isConnected)
return;
_isConnected = false;
try {
_inputOutput.close ();
mCloseSocketProc.close();
} catch (IOException e) {
// Best-effort close; errors while tearing down are ignored.
}
// Clear the resources associated with this client.
if (_closeDownMode == Destroy)
for (Resource r: _resources)
r.delete ();
_resources.clear ();
_xServer.removeClient (this);
}
/**
 * Handle communications with the client: read the X11 connection setup
 * (byte order, auth fields), write the setup reply (server info, formats,
 * screen), then loop reading and dispatching requests until told to stop.
 * @throws IOException
 */
private void
doComms () throws IOException {
// Read the connection setup.
int byteOrder = _inputOutput.readByte ();
if (byteOrder == 0x42)		// 'B' = most-significant byte first.
_inputOutput.setMSB (true);
else if (byteOrder == 0x6c)	// 'l' = least-significant byte first.
_inputOutput.setMSB (false);
else
return;	// Invalid byte-order marker; abandon the connection.
_inputOutput.readByte ();	// Unused.
_inputOutput.readShort ();	// Protocol major version.
_inputOutput.readShort ();	// Protocol minor version.
int nameLength = _inputOutput.readShort ();
int dataLength = _inputOutput.readShort ();
_inputOutput.readShort ();	// Unused.
// Authorization fields are read and discarded (no auth is enforced here).
if (nameLength > 0) {
_inputOutput.readSkip (nameLength);	// Authorization protocol name.
_inputOutput.readSkip (-nameLength & 3);	// Padding.
}
if (dataLength > 0) {
_inputOutput.readSkip (dataLength);	// Authorization protocol data.
_inputOutput.readSkip (-dataLength & 3);	// Padding.
}
// Complete the setup.
final byte[] vendor = _xServer.vendor.getBytes ();
int pad = -vendor.length & 3;
// Reply length in 4-byte units, per the connection-setup layout below.
int extra = 26 + 2 * _xServer.getNumFormats ()
+ (vendor.length + pad) / 4;
Keyboard kb = _xServer.getKeyboard ();
synchronized (_inputOutput) {
_inputOutput.writeByte ((byte) 1);	// Success.
_inputOutput.writeByte ((byte) 0);	// Unused.
_inputOutput.writeShort (_xServer.ProtocolMajorVersion);
_inputOutput.writeShort (_xServer.ProtocolMinorVersion);
_inputOutput.writeShort ((short) extra);	// Length of data.
_inputOutput.writeInt (_xServer.ReleaseNumber);	// Release number.
_inputOutput.writeInt (_resourceIdBase);
_inputOutput.writeInt (_resourceIdMask);
_inputOutput.writeInt (0);		// Motion buffer size.
_inputOutput.writeShort ((short) vendor.length);	// Vendor length.
_inputOutput.writeShort ((short) 0x7fff);	// Max request length.
_inputOutput.writeByte ((byte) 1);	// Number of screens.
_inputOutput.writeByte ((byte) _xServer.getNumFormats ());
_inputOutput.writeByte ((byte) 0);	// Image byte order (0=LSB, 1=MSB).
_inputOutput.writeByte ((byte) 1);	// Bitmap bit order (0=LSB, 1=MSB).
_inputOutput.writeByte ((byte) 8);	// Bitmap format scanline unit.
_inputOutput.writeByte ((byte) 8);	// Bitmap format scanline pad.
_inputOutput.writeByte ((byte) kb.getMinimumKeycode ());
_inputOutput.writeByte ((byte) kb.getMaximumKeycode ());
_inputOutput.writePadBytes (4);	// Unused.
if (vendor.length > 0) {	// Write padded vendor string.
_inputOutput.writeBytes (vendor, 0, vendor.length);
_inputOutput.writePadBytes (pad);
}
_xServer.writeFormats (_inputOutput);
_xServer.getScreen().write (_inputOutput);
}
_inputOutput.flush ();
// Request loop: each request is opcode, arg byte, length (in 4-byte units).
while (!_closeConnection) {
int n = _inputOutput.readByte ();
byte opcode = (byte)n;
byte arg = (byte) _inputOutput.readByte ();
int requestLength = _inputOutput.readShort ();
int bytesRemaining;
if (requestLength == 0) {	// Handle big requests.
requestLength = _inputOutput.readInt ();
if (requestLength > 2)
bytesRemaining = requestLength * 4 - 8;
else
bytesRemaining = 0;
} else {
bytesRemaining = requestLength * 4 - 4;
}
// Deal with server grabs.
while (!_xServer.processingAllowed (this)) {
try {
sleep (100);
} catch (InterruptedException e) {
}
}
// Requests are processed one at a time under the server lock.
synchronized (_xServer) {
String fmt = "%s: request: opcode=%s (%d), bytesRemaining=%d";
String msg = String.format(fmt, getName(),
RequestCode.toString(opcode), opcode,
bytesRemaining);
Log.i(LOG_TAG, msg);
processRequest (opcode, arg, bytesRemaining);
}
}
}
/**
 * Is it OK to create a resource with the specified ID?
 *
 * A valid ID lies inside this client's allocated ID range and is not
 * already in use by an existing resource.
 *
 * @param id The resource ID.
 * @return True if it is OK to create a resource with the ID.
 */
private boolean validResourceId(int id) {
    boolean inClientRange = (id & ~_resourceIdMask) == _resourceIdBase;

    return inClientRange && !_xServer.resourceExists(id);
}
/**
 * Process a single request from the client.
 *
 * Reads the request body from {@code _inputOutput} and dispatches on the
 * opcode. For requests that target a resource, the resource ID is read
 * first and validated; on failure the remainder of the request is skipped
 * and the appropriate X error is written back to the client.
 *
 * @param opcode The request's opcode.
 * @param arg Optional first argument (the "data" byte of the request header).
 * @param bytesRemaining Bytes yet to be read in the request.
 * @throws IOException
 */
private void processRequest(byte opcode, byte arg, int bytesRemaining) throws IOException {
    _sequenceNumber++;
    switch (opcode) {
        case RequestCode.CreateWindow:
            if (bytesRemaining < 28) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt (); // Window ID.
                int parent = _inputOutput.readInt (); // Parent.
                Resource r = _xServer.getResource (parent);

                bytesRemaining -= 8;
                if (!validResourceId (id)) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id);
                } else if (r == null || r.getType () != Resource.WINDOW) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.Window, opcode, parent);
                } else {
                    Window w = (Window) r;

                    w.processCreateWindowRequest (_inputOutput, this,
                                    _sequenceNumber, id, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.ChangeWindowAttributes:
        case RequestCode.GetWindowAttributes:
        case RequestCode.DestroyWindow:
        case RequestCode.DestroySubwindows:
        case RequestCode.ChangeSaveSet:
        case RequestCode.ReparentWindow:
        case RequestCode.MapWindow:
        case RequestCode.MapSubwindows:
        case RequestCode.UnmapWindow:
        case RequestCode.UnmapSubwindows:
        case RequestCode.ConfigureWindow:
        case RequestCode.CirculateWindow:
        case RequestCode.QueryTree:
        case RequestCode.ChangeProperty:
        case RequestCode.DeleteProperty:
        case RequestCode.GetProperty:
        case RequestCode.ListProperties:
        case RequestCode.QueryPointer:
        case RequestCode.GetMotionEvents:
        case RequestCode.TranslateCoordinates:
        case RequestCode.ClearArea:
        case RequestCode.ListInstalledColormaps:
        case RequestCode.RotateProperties:
            // Requests whose first argument is a window ID.
            if (bytesRemaining < 4) {
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || r.getType () != Resource.WINDOW) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.Window, opcode, id);
                } else {
                    r.processRequest (this, opcode, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.GetGeometry:
        case RequestCode.CopyArea:
        case RequestCode.CopyPlane:
        case RequestCode.PolyPoint:
        case RequestCode.PolyLine:
        case RequestCode.PolySegment:
        case RequestCode.PolyRectangle:
        case RequestCode.PolyArc:
        case RequestCode.FillPoly:
        case RequestCode.PolyFillRectangle:
        case RequestCode.PolyFillArc:
        case RequestCode.PutImage:
        case RequestCode.GetImage:
        case RequestCode.PolyText8:
        case RequestCode.PolyText16:
        case RequestCode.ImageText8:
        case RequestCode.ImageText16:
        case RequestCode.QueryBestSize:
            // Requests whose first argument is a drawable (window or pixmap).
            if (bytesRemaining < 4) {
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || !r.isDrawable ()) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.Drawable, opcode, id);
                } else {
                    r.processRequest (this, opcode, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.InternAtom:
            Atom.processInternAtomRequest (_xServer, this, arg, bytesRemaining);
            break;
        case RequestCode.GetAtomName:
            Atom.processGetAtomNameRequest (_xServer, this, bytesRemaining);
            break;
        case RequestCode.GetSelectionOwner:
        case RequestCode.SetSelectionOwner:
        case RequestCode.ConvertSelection:
            Selection.processRequest (_xServer, this, opcode, bytesRemaining);
            break;
        case RequestCode.SendEvent:
        case RequestCode.GrabPointer:
        case RequestCode.UngrabPointer:
        case RequestCode.GrabButton:
        case RequestCode.UngrabButton:
        case RequestCode.ChangeActivePointerGrab:
        case RequestCode.GrabKeyboard:
        case RequestCode.UngrabKeyboard:
        case RequestCode.GrabKey:
        case RequestCode.UngrabKey:
        case RequestCode.AllowEvents:
        case RequestCode.SetInputFocus:
        case RequestCode.GetInputFocus:
            _xServer.getScreen().processRequest (_xServer, this, opcode,
                                                arg, bytesRemaining);
            break;
        case RequestCode.GrabServer:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.grabServer (this);
            }
            break;
        case RequestCode.UngrabServer:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.ungrabServer (this);
            }
            break;
        case RequestCode.WarpPointer:
        case RequestCode.ChangePointerControl:
        case RequestCode.GetPointerControl:
        case RequestCode.SetPointerMapping:
        case RequestCode.GetPointerMapping:
            _xServer.getPointer().processRequest (_xServer, this, opcode,
                                                arg, bytesRemaining);
            break;
        case RequestCode.OpenFont:
            if (bytesRemaining < 8) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt (); // Font ID.

                bytesRemaining -= 4;
                if (!validResourceId (id)) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id);
                } else {
                    Font.processOpenFontRequest (_xServer, this, id,
                                                bytesRemaining);
                }
            }
            break;
        case RequestCode.CloseFont:
            if (bytesRemaining != 4) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || r.getType () != Resource.FONT)
                    ErrorCode.write (this, ErrorCode.Font, opcode, id);
                else
                    r.processRequest (this, opcode, arg, bytesRemaining);
            }
            break;
        case RequestCode.QueryFont:
        case RequestCode.QueryTextExtents:
            if (bytesRemaining != 4) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || !r.isFontable ()) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.Font, opcode, id);
                } else {
                    r.processRequest (this, opcode, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.ListFonts:
        case RequestCode.ListFontsWithInfo:
            Font.processListFonts (this, opcode, bytesRemaining);
            break;
        case RequestCode.SetFontPath:
            Font.processSetFontPath (_xServer, this, bytesRemaining);
            break;
        case RequestCode.GetFontPath:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                Font.processGetFontPath (_xServer, this);
            }
            break;
        case RequestCode.CreatePixmap:
            if (bytesRemaining != 12) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt (); // Pixmap ID.
                int did = _inputOutput.readInt (); // Drawable ID.
                int width = _inputOutput.readShort (); // Width.
                int height = _inputOutput.readShort (); // Height.
                Resource r = _xServer.getResource (did);

                if (!validResourceId (id)) {
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id);
                } else if (r == null || !r.isDrawable ()) {
                    ErrorCode.write (this, ErrorCode.Drawable, opcode, did);
                } else {
                    try {
                        Pixmap.processCreatePixmapRequest (_xServer, this,
                                            id, width, height, arg, r);
                    } catch (OutOfMemoryError e) {
                        // Pixmap allocation failed; report as Alloc error.
                        ErrorCode.write (this, ErrorCode.Alloc, opcode, 0);
                    }
                }
            }
            break;
        case RequestCode.FreePixmap:
            if (bytesRemaining != 4) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || r.getType () != Resource.PIXMAP)
                    ErrorCode.write (this, ErrorCode.Pixmap, opcode, id);
                else
                    r.processRequest (this, opcode, arg, bytesRemaining);
            }
            break;
        case RequestCode.CreateGC:
            if (bytesRemaining < 12) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt (); // GContext ID.
                int d = _inputOutput.readInt (); // Drawable ID.
                Resource r = _xServer.getResource (d);

                bytesRemaining -= 8;
                if (!validResourceId (id)) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id);
                } else if (r == null || !r.isDrawable ()) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.Drawable, opcode, d);
                } else {
                    GContext.processCreateGCRequest (_xServer, this, id,
                                                    bytesRemaining);
                }
            }
            break;
        case RequestCode.ChangeGC:
        case RequestCode.CopyGC:
        case RequestCode.SetDashes:
        case RequestCode.SetClipRectangles:
        case RequestCode.FreeGC:
            if (bytesRemaining < 4) {
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || r.getType () != Resource.GCONTEXT) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.GContext, opcode, id);
                } else {
                    r.processRequest (this, opcode, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.CreateColormap:
            if (bytesRemaining != 12) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt (); // Colormap ID.

                bytesRemaining -= 4;
                if (!validResourceId (id)) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id);
                } else {
                    Colormap.processCreateColormapRequest (_xServer, this,
                                                            id, arg);
                }
            }
            break;
        case RequestCode.CopyColormapAndFree:
            if (bytesRemaining != 8) {
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id1 = _inputOutput.readInt ();
                int id2 = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id2);

                if (r == null || r.getType () != Resource.COLORMAP)
                    ErrorCode.write (this, ErrorCode.Colormap, opcode, id2);
                else if (!validResourceId (id1))
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id1);
                else
                    ((Colormap) r).processCopyColormapAndFree (this, id1);
            }
            break;
        case RequestCode.FreeColormap:
        case RequestCode.InstallColormap:
        case RequestCode.UninstallColormap:
        case RequestCode.AllocColor:
        case RequestCode.AllocNamedColor:
        case RequestCode.AllocColorCells:
        case RequestCode.AllocColorPlanes:
        case RequestCode.FreeColors:
        case RequestCode.StoreColors:
        case RequestCode.StoreNamedColor:
        case RequestCode.QueryColors:
        case RequestCode.LookupColor:
            if (bytesRemaining < 4) {
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || r.getType () != Resource.COLORMAP) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.Colormap, opcode, id);
                } else {
                    r.processRequest (this, opcode, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.CreateCursor:
        case RequestCode.CreateGlyphCursor:
            if (bytesRemaining != 28) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt (); // Cursor ID.

                bytesRemaining -= 4;
                if (!validResourceId (id)) {
                    _inputOutput.readSkip (bytesRemaining);
                    ErrorCode.write (this, ErrorCode.IDChoice, opcode, id);
                } else {
                    Cursor.processCreateRequest (_xServer, this, opcode,
                                                id, bytesRemaining);
                }
            }
            break;
        case RequestCode.FreeCursor:
        case RequestCode.RecolorCursor:
            if (bytesRemaining < 4) {
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int id = _inputOutput.readInt ();
                Resource r = _xServer.getResource (id);

                bytesRemaining -= 4;
                if (r == null || r.getType () != Resource.CURSOR) {
                    _inputOutput.readSkip (bytesRemaining);
                    // Fix: a bad cursor ID is a Cursor error, not Colormap,
                    // per the X protocol error semantics for these requests.
                    ErrorCode.write (this, ErrorCode.Cursor, opcode, id);
                } else {
                    r.processRequest (this, opcode, arg, bytesRemaining);
                }
            }
            break;
        case RequestCode.QueryExtension:
            _xServer.processQueryExtensionRequest (this, bytesRemaining);
            break;
        case RequestCode.ListExtensions:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.writeListExtensions (this);
            }
            break;
        case RequestCode.QueryKeymap:
        case RequestCode.ChangeKeyboardMapping:
        case RequestCode.GetKeyboardMapping:
        case RequestCode.ChangeKeyboardControl:
        case RequestCode.SetModifierMapping:
        case RequestCode.GetModifierMapping:
        case RequestCode.GetKeyboardControl:
        case RequestCode.Bell:
            _xServer.getKeyboard().processRequest (_xServer, this, opcode,
                                                    arg, bytesRemaining);
            break;
        case RequestCode.SetScreenSaver:
            if (bytesRemaining != 8) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                int timeout = _inputOutput.readShort (); // Timeout.
                int interval = _inputOutput.readShort (); // Interval
                int pb = _inputOutput.readByte (); // Prefer-blanking.
                int ae = _inputOutput.readByte (); // Allow-exposures.

                _inputOutput.readSkip (2); // Unused.
                _xServer.setScreenSaver (timeout, interval, pb, ae);
            }
            break;
        case RequestCode.GetScreenSaver:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.writeScreenSaver (this);
            }
            break;
        case RequestCode.ChangeHosts:
            _xServer.processChangeHostsRequest (this, arg, bytesRemaining);
            break;
        case RequestCode.ListHosts:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.writeListHosts (this);
            }
            break;
        case RequestCode.SetAccessControl:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.setAccessControl (arg == 1);
            }
            break;
        case RequestCode.SetCloseDownMode:
            processSetCloseDownModeRequest (arg, bytesRemaining);
            break;
        case RequestCode.KillClient:
            processKillClientRequest (bytesRemaining);
            break;
        case RequestCode.ForceScreenSaver:
            if (bytesRemaining != 0) {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Length, opcode, 0);
            } else {
                _xServer.getScreen().blank (arg == 1);
            }
            break;
        case RequestCode.NoOperation:
            _inputOutput.readSkip (bytesRemaining);
            break;
        default: // Opcode not implemented.
            // Negative bytes are opcodes >= 128, i.e. extension requests.
            if (opcode < 0) {
                Extensions.processRequest (_xServer, this, opcode, arg,
                                            bytesRemaining);
            } else {
                _inputOutput.readSkip (bytesRemaining);
                ErrorCode.write (this, ErrorCode.Implementation, opcode, 0);
            }
            break;
    }
}
/**
 * Process a SetCloseDownMode request.
 *
 * On success the mode is recorded on the client and propagated to each
 * of the client's existing resources.
 *
 * @param mode The close down mode.
 * @param bytesRemaining Bytes yet to be read in the request.
 * @throws IOException
 */
public void processSetCloseDownModeRequest(int mode, int bytesRemaining) throws IOException {
    if (bytesRemaining == 0) {
        _closeDownMode = mode;
        for (Resource resource : _resources)
            resource.setCloseDownMode (mode);
    } else {
        // Malformed request: discard the payload and report a Length error.
        _inputOutput.readSkip (bytesRemaining);
        ErrorCode.write (this, ErrorCode.Length,
                         RequestCode.SetCloseDownMode, 0);
    }
}
/**
 * Process a KillClient request.
 *
 * If the resource ID is non-zero, the client owning that resource is
 * located; if it is still connected its connection is closed, otherwise
 * its retained resources are destroyed. A resource ID of zero requests
 * destruction of resources retained by already-disconnected clients.
 *
 * @param bytesRemaining Bytes yet to be read in the request.
 * @throws IOException
 */
public void processKillClientRequest(int bytesRemaining) throws IOException {
    if (bytesRemaining != 4) {
        _inputOutput.readSkip (bytesRemaining);
        ErrorCode.write (this, ErrorCode.Length, RequestCode.KillClient, 0);
        return;
    }

    int id = _inputOutput.readInt ();
    Client client = null;

    if (id != 0) {
        Resource r = _xServer.getResource (id);

        if (r == null) {
            // Fix: an unrecognized resource ID is a Value error carrying
            // the bad ID, per the X protocol; previously this reported
            // a Length error with value 0.
            ErrorCode.write (this, ErrorCode.Value,
                             RequestCode.KillClient, id);
            return;
        }
        client = r.getClient ();
    }

    if (client != null && client._isConnected)
        client._closeConnection = true;
    else if (client == null || client._closeDownMode != Destroy)
        _xServer.destroyClientResources (client);
}
}
|
package act.db.morphia.util;
import act.db.AdaptiveRecord;
import com.alibaba.fastjson.JSONObject;
import com.mongodb.DBObject;
import org.mongodb.morphia.AbstractEntityInterceptor;
import org.mongodb.morphia.annotations.Property;
import org.mongodb.morphia.mapping.Mapper;
import org.mongodb.morphia.utils.IterHelper;
import org.osgl.$;
import org.osgl.util.C;
import org.osgl.util.S;
import javax.inject.Singleton;
import java.lang.reflect.Field;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
@Singleton
public class AdaptiveRecordMappingInterceptor extends AbstractEntityInterceptor {

    /** Property names managed by Morphia itself; never copied into the adaptive map. */
    private static final Set<String> BUILT_IN_PROPS = C.setOf("_id,className,_created,_modified,v".split(","));

    /** Cache of @Property-mapped field names per entity class. */
    private final ConcurrentMap<Class<?>, Set<String>> mappedPropertiesLookup = new ConcurrentHashMap<>();

    /**
     * Before persisting an {@link AdaptiveRecord}, copy its dynamic key/value
     * pairs into the Mongo document, encoding values via
     * {@code ValueObjectConverter}. The version field {@code v} is written as
     * a {@code Long} when present and numeric.
     */
    @Override
    public void prePersist(Object ent, DBObject dbObj, Mapper mapper) {
        if (null == ent) {
            return;
        }
        Class<?> c = ent.getClass();
        if (AdaptiveRecord.class.isAssignableFrom(c)) {
            AdaptiveRecord ar = $.cast(ent);
            Map<String, Object> kv = ar.internalMap();
            for (Map.Entry<String, Object> entry : kv.entrySet()) {
                dbObj.put(entry.getKey(), ValueObjectConverter.INSTANCE.encode(entry.getValue()));
            }
            Object o = kv.get("v");
            // instanceof is false for null, so no separate null check is needed.
            if (o instanceof Number) {
                dbObj.put("v", ((Number) o).longValue());
            }
        }
    }

    /**
     * After loading an {@link AdaptiveRecord}, pull any document keys that are
     * neither built-in, declared via a setter, nor statically mapped with
     * {@code @Property} into the record's dynamic map.
     */
    @Override
    public void postLoad(Object ent, DBObject dbObj, Mapper mapper) {
        // Guard against null entity, mirroring prePersist (was an NPE before).
        if (null == ent) {
            return;
        }
        final Class<?> c = ent.getClass();
        if (AdaptiveRecord.class.isAssignableFrom(c)) {
            AdaptiveRecord ar = $.cast(ent);
            final Map<String, Object> kv = ar.internalMap();
            final AdaptiveRecord.MetaInfo metaInfo = ar.metaInfo();
            new IterHelper<>().loopMap(dbObj, new IterHelper.MapIterCallback<Object, Object>() {
                @Override
                public void eval(final Object k, final Object val) {
                    final String key = S.string(k);
                    if (BUILT_IN_PROPS.contains(key) || metaInfo.setterFieldSpecs.containsKey(key) || mappedProperties(c).contains(key)) {
                        return;
                    }
                    kv.put(key, JSONObject.toJSON(val));
                }
            });
            kv.put("v", dbObj.get("v"));
        }
    }

    /**
     * Look up (and lazily compute) the set of {@code @Property}-mapped field
     * names for a class. Uses computeIfAbsent so concurrent callers share one
     * cached set instead of the previous get/putIfAbsent race returning a
     * discarded duplicate.
     */
    private Set<String> mappedProperties(Class c) {
        return mappedPropertiesLookup.computeIfAbsent(c, this::findMappedProperties);
    }

    /** Scan all fields (including inherited) for {@code @Property} annotations. */
    private Set<String> findMappedProperties(Class c) {
        Set<String> set = new HashSet<>();
        List<Field> fieldList = $.fieldsOf(c, true);
        for (Field field : fieldList) {
            Property property = field.getAnnotation(Property.class);
            if (null != property) {
                set.add(property.value());
            }
        }
        return set;
    }
}
|
package org.phenotips.obo2solr;
import java.io.File;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
 * Generates Solr update documents from an ontology source.
 *
 * Only the URL-string overload currently returns a (empty) result; the
 * file-based and URL-object overloads are not yet implemented.
 */
public class SolrUpdateGenerator
{
    /**
     * Not implemented.
     *
     * @throws UnsupportedOperationException always
     */
    public void transform(File input, File output, Map<String, Double> fieldSelection)
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Stub implementation: produces no terms.
     *
     * @return an empty, mutable map
     */
    public Map<String, TermData> transform(String ontologyUrl, Map<String, Double> fieldSelection)
    {
        Map<String, TermData> terms = new HashMap<String, TermData>();
        return terms;
    }

    /**
     * Not implemented.
     *
     * @throws UnsupportedOperationException always
     */
    public Map<String, TermData> transform(URL input, Map<String, Double> fieldSelection)
    {
        throw new UnsupportedOperationException();
    }
}
|
package com.ar.tothestars;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.widget.ImageView;
import android.widget.Toast;
import com.ar.tothestars.models.Photo;
import com.ar.tothestars.services.APODManager;
import com.ar.tothestars.services.APODParser;
import com.ar.tothestars.ui.PaletteTransformation;
import com.squareup.picasso.Picasso;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Locale;
import retrofit.Callback;
import retrofit.RetrofitError;
import retrofit.client.Response;
import uk.co.senab.photoview.PhotoViewAttacher;
/**
 * Shows NASA's Astronomy Picture of the Day (APOD) with pinch-zoom support.
 * The loaded photo is retained across configuration changes via the saved
 * instance state.
 */
public class MainActivity extends ActionBarActivity {

    private final static String DATE_FORMAT = "yyyy-MM-dd";
    private static final String PHOTO = "photo";

    private Photo mPhoto;
    private ImageView mPhotoView;
    private PhotoViewAttacher mPhotoViewAttacher;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mPhotoView = (ImageView) findViewById(R.id.photo);

        // Restore the previously fetched photo if one was saved.
        if (savedInstanceState != null) {
            mPhoto = savedInstanceState.getParcelable(PHOTO);
        }

        if (mPhoto != null) {
            showPhoto();
        } else {
            getPhoto();
        }
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putParcelable(PHOTO, mPhoto);
    }

    /** Requests yesterday's APOD from the NASA API and displays it on success. */
    private void getPhoto() {
        SimpleDateFormat formatter = new SimpleDateFormat(DATE_FORMAT, Locale.US);
        Calendar yesterday = Calendar.getInstance();
        yesterday.add(Calendar.DATE, -1);
        String requestDate = formatter.format(yesterday.getTime());

        APODManager.getClient()
                .getPhoto(requestDate, true, Credentials.NASA_KEY, new Callback<String>() {
                    @Override
                    public void success(String s, Response response) {
                        mPhoto = APODParser.getPhoto(s);
                        showPhoto();
                    }

                    @Override
                    public void failure(RetrofitError error) {
                        Toast.makeText(MainActivity.this, getString(R.string.server_error), Toast.LENGTH_SHORT).show();
                    }
                });
    }

    /** Loads the photo URL into the ImageView and attaches zoom gestures. */
    private void showPhoto() {
        Picasso.with(this)
                .load(mPhoto.getUrl())
                .fit()
                .centerInside()
                .transform(PaletteTransformation.instance())
                .into(mPhotoView, new com.squareup.picasso.Callback.EmptyCallback() {
                    @Override
                    public void onSuccess() {
                        // Attach (or refresh) pinch-zoom gesture support.
                        if (mPhotoViewAttacher == null) {
                            mPhotoViewAttacher = new PhotoViewAttacher(mPhotoView);
                        } else {
                            mPhotoViewAttacher.update();
                        }
                        // Palette-based background tinting was prototyped here
                        // but is currently disabled.
                    }
                });
    }
}
|
package at.ac.tuwien.kr.alpha.grounder.atoms;
import at.ac.tuwien.kr.alpha.common.Predicate;
import at.ac.tuwien.kr.alpha.common.atoms.BasicAtom;
import at.ac.tuwien.kr.alpha.common.terms.ConstantTerm;
import at.ac.tuwien.kr.alpha.common.terms.Term;
import at.ac.tuwien.kr.alpha.common.terms.VariableTerm;
import at.ac.tuwien.kr.alpha.grounder.Substitution;
import java.util.HashMap;
import java.util.List;
import static at.ac.tuwien.kr.alpha.Util.oops;
/**
 * An atom of the form {@code _Enumeration(Id, Term, Index)} that assigns a
 * distinct integer index (starting at 1) to each ground term enumerated under
 * a given identifier. The third argument must be a variable, which is bound
 * to the assigned index during grounding.
 */
public class EnumerationAtom extends BasicAtom {
    public static final Predicate ENUMERATION_PREDICATE = Predicate.getInstance("_Enumeration", 3);
    // Global enumeration state: identifier -> (enumerated term -> assigned index).
    // NOTE(review): plain HashMaps — assumes single-threaded grounding; confirm.
    private static final HashMap<Term, HashMap<Term, Integer>> ENUMERATIONS = new HashMap<>();

    public EnumerationAtom(List<Term> terms) {
        super(ENUMERATION_PREDICATE, terms);
        if (terms.size() != 3) {
            throw new RuntimeException("EnumerationAtom must have arity three. Given terms are of wrong size: " + terms);
        }
        if (!(getTerms().get(2) instanceof VariableTerm)) {
            throw new RuntimeException("Third parameter of EnumerationAtom must be a variable: " + terms);
        }
    }

    /** Clears all enumeration state (used between solver runs). */
    public static void resetEnumerations() {
        ENUMERATIONS.clear();
    }

    /**
     * Returns the index assigned to {@code enumerationTerm} under
     * {@code identifier}, assigning the next free index (size + 1) on first
     * encounter.
     */
    private Integer getEnumerationIndex(Term identifier, Term enumerationTerm) {
        // computeIfAbsent avoids the unconditional allocation that
        // putIfAbsent(identifier, new HashMap<>()) performed on every call.
        HashMap<Term, Integer> enumeratedTerms = ENUMERATIONS.computeIfAbsent(identifier, k -> new HashMap<>());
        Integer assignedInteger = enumeratedTerms.get(enumerationTerm);
        if (assignedInteger == null) {
            int enumerationIndex = enumeratedTerms.size() + 1;
            enumeratedTerms.put(enumerationTerm, enumerationIndex);
            return enumerationIndex;
        }
        return assignedInteger;
    }

    /**
     * Based on a given substitution, substitutes the first two terms of this {@link EnumerationAtom} with the values from the substitution,
     * and returns a new substitution with all mappings from the input substitution plus a binding for the third term of the enum atom to the
     * integer index that is mapped to the first two terms in the internal <code>ENUMERATIONS</code> map.
     *
     * @param substitution an input substitution which must provide ground terms for the first two terms of the enumeration atom
     * @return a new substitution where the third term of the enumeration atom is bound to an integer
     */
    public Substitution addEnumerationIndexToSubstitution(Substitution substitution) {
        Term idTerm = this.getTerms().get(0).substitute(substitution);
        Term enumerationTerm = this.getTerms().get(1).substitute(substitution);
        if (!enumerationTerm.isGround()) {
            throw new RuntimeException("Enumeration term is not ground after substitution. Should not happen.");
        }
        Integer enumerationIndex = getEnumerationIndex(idTerm, enumerationTerm);
        Substitution retVal = new Substitution(substitution);
        retVal.put((VariableTerm) getTerms().get(2), ConstantTerm.getInstance(enumerationIndex));
        return retVal;
    }

    @Override
    public EnumerationAtom substitute(Substitution substitution) {
        return new EnumerationAtom(super.substitute(substitution).getTerms());
    }

    @Override
    public EnumerationLiteral toLiteral(boolean positive) {
        if (!positive) {
            // Fix: message previously said "IntervalLiteral" (copy-paste from
            // IntervalAtom); report the correct atom type.
            throw oops("EnumerationLiteral cannot be negated");
        }
        return new EnumerationLiteral(this);
    }

    @Override
    public EnumerationLiteral toLiteral() {
        return toLiteral(true);
    }
}
|
package ca.corefacility.bioinformatics.irida.model;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collection;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.PostLoad;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.validation.constraints.NotNull;
import org.hibernate.envers.Audited;
/**
* Description of a remote Irida API that this API can communicate with via
* OAuth2
*
* @author Thomas Matthews <thomas.matthews@phac-aspc.gc.ca>
*
*/
@Entity
@Table(name = "remote_api")
@Audited
public class RemoteAPI implements Comparable<RemoteAPI> {
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    // Transient URI form of the service address; rebuilt from
    // stringServiceURI in postLoad() and flattened back in prePersist().
    @Transient
    private URI serviceURI;

    // keeping a string representation of the service URI so it's stored nicer
    // in the database
    @NotNull
    @Column(name = "serviceURI")
    private String stringServiceURI;

    private String description;

    @NotNull
    private String clientId;

    @NotNull
    private String clientSecret;

    // OAuth2 tokens issued against this remote API.
    @OneToMany(mappedBy = "remoteApi")
    private Collection<RemoteAPIToken> tokens;

    public RemoteAPI() {
    }

    public RemoteAPI(URI serviceURI, String description, String clientId, String clientSecret) {
        this.serviceURI = serviceURI;
        this.description = description;
        this.clientId = clientId;
        this.clientSecret = clientSecret;
    }

    /**
     * Setting the proper service URI after load.
     *
     * @throws URISyntaxException if the stored string is not a valid URI
     */
    @PostLoad
    public void postLoad() throws URISyntaxException {
        serviceURI = new URI(stringServiceURI);
    }

    /**
     * Setting the string service URI before we store it in the database.
     */
    @PrePersist
    @PreUpdate
    public void prePersist() {
        stringServiceURI = serviceURI.toString();
    }

    /**
     * Get the entity id.
     *
     * @return the database identifier of this remote API
     */
    public Long getId() {
        return id;
    }

    /**
     * Set the entity id.
     *
     * @param id the database identifier to set
     */
    public void setId(Long id) {
        this.id = id;
    }

    /**
     * Get the base URI of this remote api.
     *
     * @return the service URI (null until set or loaded)
     */
    public URI getServiceURI() {
        return serviceURI;
    }

    /**
     * Set the base URI of this remote service.
     *
     * @param serviceURI the base URI of the remote service
     */
    public void setServiceURI(URI serviceURI) {
        this.serviceURI = serviceURI;
    }

    /**
     * Get a description of the remote api.
     *
     * @return a human-readable description
     */
    public String getDescription() {
        return description;
    }

    /**
     * Set the description for the remote api.
     *
     * @param description a human-readable description
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the clientId
     */
    public String getClientId() {
        return clientId;
    }

    /**
     * @param clientId
     *            the clientId to set
     */
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    /**
     * @return the clientSecret
     */
    public String getClientSecret() {
        return clientSecret;
    }

    /**
     * @param clientSecret
     *            the clientSecret to set
     */
    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    @Override
    public String toString() {
        return "RemoteAPI [" + clientId + ", " + serviceURI + ", " + description + "]";
    }

    // Equality is defined on serviceURI + OAuth2 credentials; description and
    // id are intentionally excluded. hashCode below uses the same fields.
    @Override
    public boolean equals(Object other) {
        if (other instanceof RemoteAPI) {
            RemoteAPI p = (RemoteAPI) other;
            return Objects.equals(serviceURI, p.serviceURI) && Objects.equals(clientId, p.clientId)
                    && Objects.equals(clientSecret, p.clientSecret);
        }
        return false;
    }

    // NOTE(review): ordering uses only serviceURI, so compareTo == 0 does not
    // imply equals — acceptable for display sorting, but confirm no sorted
    // sets rely on this.
    @Override
    public int compareTo(RemoteAPI o) {
        return serviceURI.compareTo(o.serviceURI);
    }

    @Override
    public int hashCode() {
        return Objects.hash(serviceURI, clientId, clientSecret);
    }
}
|
package co.andrewbates.grade.controller;
import org.controlsfx.dialog.ExceptionDialog;
import co.andrewbates.grade.GradePreferences;
import co.andrewbates.grade.data.Database;
import co.andrewbates.grade.data.Database.Loader;
import javafx.animation.PauseTransition;
import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.control.ProgressBar;
import javafx.stage.Stage;
import javafx.util.Duration;
/**
 * Controller for the splash/initialization screen. Kicks off the database
 * loader on a background thread, mirrors its progress in the UI, and keeps
 * the splash visible for at least {@link #SPLASH_DELAY} milliseconds.
 */
public class InitializeController {
    private static final long SPLASH_DELAY = 1000;

    @FXML
    ProgressBar progressBar;

    @FXML
    Label progressText;

    Loader loader;
    long startTime;

    /** Wires the loader lifecycle to the splash stage once it is available. */
    private void start(Stage stage) {
        stage.showingProperty().addListener((observable, wasShowing, isShowing) -> {
            if (isShowing) {
                startTime = System.currentTimeMillis();
                stage.toFront();
            }
        });

        loader.valueProperty().addListener((obs, oldMessage, newMessage) -> progressText.setText(newMessage));

        loader.setOnSucceeded(event -> {
            long elapsed = System.currentTimeMillis() - startTime;
            if (elapsed >= SPLASH_DELAY) {
                stage.hide();
            } else {
                // Keep the splash up until the minimum display time has passed.
                PauseTransition delay = new PauseTransition(new Duration(SPLASH_DELAY - elapsed));
                delay.setOnFinished(finished -> stage.hide());
                delay.play();
            }
        });

        loader.setOnFailed(event -> {
            new ExceptionDialog(loader.getException()).showAndWait();
            stage.hide();
        });
    }

    /** FXML entry point: starts loading the database and binds progress. */
    public void initialize() {
        loader = Database.load(GradePreferences.dataDirectory());
        progressBar.progressProperty().bind(loader.progressProperty());

        // The stage is only reachable once the scene and window exist, so
        // chain listeners: scene -> window -> start(stage).
        progressBar.sceneProperty().addListener((sceneObs, oldScene, newScene) -> {
            if (newScene != null) {
                newScene.windowProperty().addListener((winObs, oldWindow, newWindow) -> {
                    if (newWindow != null) {
                        start((Stage) newWindow);
                    }
                });
            }
        });

        new Thread(loader).start();
    }
}
|
package idle.land.app.ui;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.EditText;
import android.widget.Toast;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
import com.squareup.otto.Subscribe;
import idle.land.app.R;
import idle.land.app.logic.AccountManager;
import idle.land.app.logic.BusProvider;
import idle.land.app.logic.Preferences;
import idle.land.app.logic.api.HeartbeatEvent;
import idle.land.app.logic.api.HeartbeatService;
public class LoginActivity extends ActionBarActivity implements CompoundButton.OnCheckedChangeListener {
public static final String TAG = LoginActivity.class.getSimpleName();
@InjectView(R.id.etLoginName)
EditText etLoginName;
@InjectView(R.id.etLoginPassword)
EditText etLoginPassword;
@InjectView(R.id.cbLoginRemember)
CheckBox cbRemember;
AccountManager mAccountManager;
Preferences mPrefs;
boolean remember;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.login_frag);
getSupportActionBar().hide();
ButterKnife.inject(this);
mAccountManager = AccountManager.getInstance();
mPrefs = new Preferences();
remember = mPrefs.getBoolean(Preferences.Property.ACC_REMEMBER);
cbRemember.setChecked(remember);
cbRemember.setOnCheckedChangeListener(this);
if(remember)
{
AccountManager.Account account = mAccountManager.get();
if(account != null)
{
if(account.appName.equals(getString(R.string.app_ident)))
etLoginName.setText(account.username);
else
etLoginName.setText(account.getIdentifier());
etLoginPassword.setText(account.password);
}
}
}
@OnClick(R.id.btLoginLogin)
public void onLoginButtonClick()
{
login();
}
@OnClick(R.id.btLoginRegister)
public void onRegisterButtonClick()
{
Toast.makeText(this, "Not implemented", Toast.LENGTH_SHORT).show();
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
remember = isChecked;
}
/**
* performs the login operation
*/
private void login()
{
// parse login information
String identifier = etLoginName.getText().toString().trim();
String username;
String appName;
if(identifier.contains("
{
String[] split = identifier.split("
appName = split[0];
username = split[1];
} else
{
username = identifier;
appName = getString(R.string.app_ident);
}
String password = etLoginPassword.getText().toString().trim();
// update account information und start the login process
if(mAccountManager.update(username, appName, password))
{
mAccountManager.setRemember(remember);
mAccountManager.updateToken(null); // reset token on login
startService(new Intent(LoginActivity.this, HeartbeatService.class));
}
else
Toast.makeText(this, "Invalid Credentials. TODO: Reason", Toast.LENGTH_SHORT).show();
}
private void openMainActivity()
{
startActivity(new Intent(LoginActivity.this, MainActivity.class));
finish();
}
@Override
protected void onResume() {
super.onResume();
BusProvider.getInstance().register(this);
}
@Override
protected void onPause() {
BusProvider.getInstance().unregister(this);
super.onPause();
}
@Subscribe
public void onHeartbeatEvent(HeartbeatEvent event)
{
if(event.type == HeartbeatEvent.EventType.LOGGED_IN || event.type == HeartbeatEvent.EventType.HEARTBEAT)
openMainActivity();
}
}
|
package com.alibaba.fastjson.support.spring;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONPObject;
import com.alibaba.fastjson.serializer.SerializeFilter;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.fastjson.support.config.FastJsonConfig;
import com.alibaba.fastjson.util.IOUtils;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.validation.BindingResult;
import org.springframework.web.servlet.view.AbstractView;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayOutputStream;
import java.nio.charset.Charset;
import java.util.*;
import java.util.regex.Pattern;
/**
* Fastjson for Spring MVC View.
*
* @author libinsong1204@gmail.com
* @author VictorZeng
* @see AbstractView
* @since 1.2.9
*/
/**
 * Fastjson for Spring MVC View.
 *
 * Serializes the (filtered) model map to JSON and writes it to the response.
 * When a valid JSONP callback parameter is present on the request, the payload
 * is wrapped in a {@link JSONPObject} and served with the JSONP content type.
 *
 * @author libinsong1204@gmail.com
 * @author VictorZeng
 * @see AbstractView
 * @since 1.2.9
 */
public class FastJsonJsonView extends AbstractView {

    /**
     * default content type
     */
    public static final String DEFAULT_CONTENT_TYPE = "application/json;charset=UTF-8";

    /**
     * Default content type for JSONP: "application/javascript".
     */
    public static final String DEFAULT_JSONP_CONTENT_TYPE = "application/javascript";

    /**
     * Pattern for validating jsonp callback parameter values.
     */
    private static final Pattern CALLBACK_PARAM_PATTERN = Pattern.compile("[0-9A-Za-z_\\.]*");

    // Deprecated fields kept for binary compatibility; current configuration
    // lives in fastJsonConfig (see the deprecated accessors below).
    @Deprecated
    protected Charset charset = Charset.forName("UTF-8");

    @Deprecated
    protected SerializerFeature[] features = new SerializerFeature[0];

    @Deprecated
    protected SerializeFilter[] filters = new SerializeFilter[0];

    @Deprecated
    protected String dateFormat;

    /**
     * renderedAttributes — when non-empty, only these model keys are rendered.
     */
    private Set<String> renderedAttributes;

    /**
     * disableCaching
     */
    private boolean disableCaching = true;

    /**
     * updateContentLength
     */
    private boolean updateContentLength = true;

    /**
     * extractValueFromSingleKeyModel
     */
    private boolean extractValueFromSingleKeyModel = false;

    /**
     * with fastJson config
     */
    private FastJsonConfig fastJsonConfig = new FastJsonConfig();

    /**
     * jsonp parameter name
     */
    private String[] jsonpParameterNames = {"jsonp", "callback"};

    /**
     * Set default param.
     */
    public FastJsonJsonView() {
        setContentType(DEFAULT_CONTENT_TYPE);
        setExposePathVariables(false);
    }

    /**
     * @return the fastJsonConfig.
     * @since 1.2.11
     */
    public FastJsonConfig getFastJsonConfig() {
        return fastJsonConfig;
    }

    /**
     * @param fastJsonConfig the fastJsonConfig to set.
     * @since 1.2.11
     */
    public void setFastJsonConfig(FastJsonConfig fastJsonConfig) {
        this.fastJsonConfig = fastJsonConfig;
    }

    @Deprecated
    public void setSerializerFeature(SerializerFeature... features) {
        this.fastJsonConfig.setSerializerFeatures(features);
    }

    @Deprecated
    public Charset getCharset() {
        return this.fastJsonConfig.getCharset();
    }

    @Deprecated
    public void setCharset(Charset charset) {
        this.fastJsonConfig.setCharset(charset);
    }

    @Deprecated
    public String getDateFormat() {
        return this.fastJsonConfig.getDateFormat();
    }

    @Deprecated
    public void setDateFormat(String dateFormat) {
        this.fastJsonConfig.setDateFormat(dateFormat);
    }

    @Deprecated
    public SerializerFeature[] getFeatures() {
        return this.fastJsonConfig.getSerializerFeatures();
    }

    @Deprecated
    public void setFeatures(SerializerFeature... features) {
        this.fastJsonConfig.setSerializerFeatures(features);
    }

    @Deprecated
    public SerializeFilter[] getFilters() {
        return this.fastJsonConfig.getSerializeFilters();
    }

    @Deprecated
    public void setFilters(SerializeFilter... filters) {
        this.fastJsonConfig.setSerializeFilters(filters);
    }

    /**
     * Set renderedAttributes.
     *
     * @param renderedAttributes renderedAttributes
     */
    public void setRenderedAttributes(Set<String> renderedAttributes) {
        this.renderedAttributes = renderedAttributes;
    }

    /**
     * Check extractValueFromSingleKeyModel.
     *
     * @return extractValueFromSingleKeyModel
     */
    public boolean isExtractValueFromSingleKeyModel() {
        return extractValueFromSingleKeyModel;
    }

    /**
     * Set extractValueFromSingleKeyModel.
     *
     * @param extractValueFromSingleKeyModel whether a single-entry model is
     *        unwrapped to its sole value instead of being rendered as a map
     */
    public void setExtractValueFromSingleKeyModel(
            boolean extractValueFromSingleKeyModel) {
        this.extractValueFromSingleKeyModel = extractValueFromSingleKeyModel;
    }

    public void setJsonpParameterNames(Set<String> jsonpParameterNames) {
        Assert.notEmpty(jsonpParameterNames, "jsonpParameterName cannot be empty");
        this.jsonpParameterNames = jsonpParameterNames.toArray(new String[jsonpParameterNames.size()]);
    }

    /**
     * Returns the first valid JSONP callback value found among the configured
     * parameter names, or {@code null} when none is present/valid.
     */
    private String getJsonpParameterValue(HttpServletRequest request) {
        if (this.jsonpParameterNames != null) {
            for (String name : this.jsonpParameterNames) {
                String value = request.getParameter(name);
                if (IOUtils.isValidJsonpQueryParam(value)) {
                    return value;
                }
                // NOTE(review): this also logs for absent parameters (value == null)
                // — presumably intentional debug noise; confirm.
                if (logger.isDebugEnabled()) {
                    logger.debug("Ignoring invalid jsonp parameter value: " + value);
                }
            }
        }
        return null;
    }

    @Override
    protected void renderMergedOutputModel(Map<String, Object> model,
                                           HttpServletRequest request,
                                           HttpServletResponse response) throws Exception {
        // Reduce the model to the object to serialize, wrapping for JSONP if requested.
        Object value = filterModel(model);
        String jsonpParameterValue = getJsonpParameterValue(request);
        if (jsonpParameterValue != null) {
            JSONPObject jsonpObject = new JSONPObject(jsonpParameterValue);
            jsonpObject.addParameter(value);
            value = jsonpObject;
        }
        // Serialize into a buffer first so the byte length is known.
        ByteArrayOutputStream outnew = new ByteArrayOutputStream();
        int len = JSON.writeJSONString(outnew,
                fastJsonConfig.getCharset(),
                value,
                fastJsonConfig.getSerializeConfig(),
                fastJsonConfig.getSerializeFilters(),
                fastJsonConfig.getDateFormat(),
                JSON.DEFAULT_GENERATE_FEATURE,
                fastJsonConfig.getSerializerFeatures());
        if (this.updateContentLength) {
            // Write content length (determined via byte array).
            response.setContentLength(len);
        }
        // Flush byte array to servlet output stream.
        ServletOutputStream out = response.getOutputStream();
        outnew.writeTo(out);
        outnew.close();
        out.flush();
    }

    @Override
    protected void prepareResponse(HttpServletRequest request,
                                   HttpServletResponse response) {
        setResponseContentType(request, response);
        response.setCharacterEncoding(fastJsonConfig.getCharset().name());
        if (this.disableCaching) {
            // Standard cache-busting headers; Expires of 1L means "already expired".
            response.addHeader("Pragma", "no-cache");
            response.addHeader("Cache-Control", "no-cache, no-store, max-age=0");
            response.addDateHeader("Expires", 1L);
        }
    }

    /**
     * Disables caching of the generated JSON.
     * <p>
     * Default is {@code true}, which will prevent the client from caching the
     * generated JSON.
     */
    public void setDisableCaching(boolean disableCaching) {
        this.disableCaching = disableCaching;
    }

    /**
     * Whether to update the 'Content-Length' header of the response. When set
     * to {@code true}, the response is buffered in order to determine the
     * content length and set the 'Content-Length' header of the response.
     * <p>
     * The default setting is {@code true}.
     */
    public void setUpdateContentLength(boolean updateContentLength) {
        this.updateContentLength = updateContentLength;
    }

    /**
     * Filters out undesired attributes from the given model. The return value
     * can be either another {@link Map}, or a single value object.
     * <p>
     * Default implementation removes {@link BindingResult} instances and
     * entries not included in the {@link #setRenderedAttributes(Set)
     * renderedAttributes} property.
     *
     * @param model the model, as passed on to {@link #renderMergedOutputModel}
     * @return the object to be rendered
     */
    protected Object filterModel(Map<String, Object> model) {
        Map<String, Object> result = new HashMap<String, Object>(model.size());
        // Empty/null renderedAttributes means "render everything".
        Set<String> renderedAttributes = !CollectionUtils.isEmpty(this.renderedAttributes) ?
                this.renderedAttributes
                : model.keySet();
        for (Map.Entry<String, Object> entry : model.entrySet()) {
            if (!(entry.getValue() instanceof BindingResult)
                    && renderedAttributes.contains(entry.getKey())) {
                result.put(entry.getKey(), entry.getValue());
            }
        }
        if (extractValueFromSingleKeyModel) {
            // Unwrap a single-entry model to its sole value.
            if (result.size() == 1) {
                for (Map.Entry<String, Object> entry : result.entrySet()) {
                    return entry.getValue();
                }
            }
        }
        return result;
    }

    @Override
    protected void setResponseContentType(HttpServletRequest request, HttpServletResponse response) {
        // JSONP responses are javascript, not json.
        if (getJsonpParameterValue(request) != null) {
            response.setContentType(DEFAULT_JSONP_CONTENT_TYPE);
        }
        else {
            super.setResponseContentType(request, response);
        }
    }
}
|
package com.booking.replication.util;
import com.booking.replication.sql.QueryInspector;
import com.booking.replication.sql.exception.QueryInspectorException;
import org.apache.commons.dbcp2.BasicDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
/**
 * Locates the binlog file name and position of a given pseudo-GTID on a MySQL
 * server, by binary-searching the server's binlog files on their first
 * pseudo-GTID and then scanning the chosen file for the exact event.
 */
public class BinlogCoordinatesFinder {

    private static final Logger LOGGER = LoggerFactory.getLogger(BinlogCoordinatesFinder.class);

    /** Immutable pair of (binlog file name, byte position within that file). */
    public static class BinlogCoordinates {

        private String fileName;
        private long position;

        public BinlogCoordinates(String fileName, long position) {
            this.fileName = fileName;
            this.position = position;
        }

        public String getFileName() {
            return fileName;
        }

        public long getPosition() {
            return position;
        }
    }

    /** Mutable single-value box, used to capture results from the scan lambdas. */
    private static class Holder<T> {

        private T value;

        public T getValue() {
            return value;
        }

        public void setValue(T value) {
            this.value = value;
        }
    }

    private final String host;
    private final int port;
    private final String username;
    private final String password;
    private final QueryInspector queryInspector;

    public BinlogCoordinatesFinder(String host, int port, String username, String password, QueryInspector queryInspector) {
        this.host = host;
        this.port = port;
        this.username = username;
        this.password = password;
        this.queryInspector = queryInspector;
    }

    /**
     * Finds the binlog coordinates of the given pseudo-GTID.
     *
     * @param gtid the pseudo-GTID to locate
     * @return file/position of the event carrying that GTID
     * @throws RuntimeException wrapping any SQL or query-inspection failure
     */
    public BinlogCoordinates findCoordinates(String gtid) {
        // NOTE(review): the BasicDataSource is never closed, so its pooled
        // connections outlive this call — consider try-with-resources; confirm
        // whether the caller relies on reuse.
        BasicDataSource source = new BasicDataSource();

        source.setDriverClassName("com.mysql.jdbc.Driver");
        source.setUsername(username);
        source.setPassword(password);
        source.setUrl( String.format("jdbc:mysql://%s:%s", host, port) );
        source.addConnectionProperty("useUnicode", "true");
        source.addConnectionProperty("characterEncoding", "UTF-8");

        try ( Connection connection = source.getConnection() ){
            String[] files = getBinaryLogs(connection);
            String file = findFile(gtid, files, connection);
            long position = findPosition(gtid, file, connection);
            return new BinlogCoordinates(file,position);
        } catch (SQLException | QueryInspectorException e) {
            LOGGER.error("Failed to find binlog coordinates for gtid ", e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Scans the given binlog file for the event whose pseudo-GTID equals
     * {@code gtid} and returns that event's position.
     */
    private long findPosition(final String gtid, String file, Connection connection) throws QueryInspectorException, SQLException {
        final Holder<Long> position = new Holder<>();

        findEvent(resultSet-> {
            try {
                String query = resultSet.getString( "Info" );
                if ( queryInspector.isPseudoGTID(query) && gtid.equals( queryInspector.extractPseudoGTID(query) ) ){
                    position.setValue( resultSet.getLong("Pos"));
                    return true;
                }
                return false;
            } catch (SQLException | QueryInspectorException e) {
                // Predicate cannot throw checked exceptions; rethrow unchecked.
                throw new RuntimeException(e);
            }
        }, file, connection);

        if ( position.getValue() == null ) throw new RuntimeException(String.format("Binlog file %s does not contain given GTID", file));

        return position.getValue();
    }

    /**
     * Pages through the events of a binlog file (500 at a time) applying
     * {@code condition} to each row; stops on the first match or when a page
     * comes back empty (end of file).
     */
    private void findEvent(Predicate<ResultSet> condition, String file, Connection connection) throws SQLException {
        // NOTE(review): a '?' parameter marker inside SHOW BINLOG EVENTS may not
        // be accepted by all MySQL server/driver versions — confirm against the
        // deployment target.
        try ( PreparedStatement statement = connection.prepareStatement("SHOW BINLOG EVENTS IN ? LIMIT ?,?")){

            int start = 0;
            int limit = 500;

            for (;;){

                statement.setString(1, file);
                statement.setInt(2,start);
                statement.setInt(3,limit);

                try ( ResultSet results = statement.executeQuery() ) {

                    boolean empty = true;

                    while (results.next()) {
                        empty = false;
                        if (condition.test(results)) return;
                    }

                    // No rows in this page: the file is exhausted.
                    if (empty) return;
                }

                start += limit;
            }
        }
    }

    /**
     * Binary-searches the (chronologically ordered) binlog files for the one
     * that contains {@code gtid}, comparing against each file's first
     * pseudo-GTID.
     */
    private String findFile(String gtid, String[] files, Connection connection) throws QueryInspectorException, SQLException {

        int l = 0;
        int h = files.length - 1;
        int cmp;

        // The GTID is at or past the first GTID of the newest file.
        if ( gtid.compareToIgnoreCase( getFirstGTID( files[h], connection ) ) >= 0 ) return files[h];

        cmp = gtid.compareToIgnoreCase( getFirstGTID( files[l], connection ) );

        if ( cmp < 0 ) {
            throw new RuntimeException("No binlog file contain the given GTID ");
        } else if ( cmp ==0 ){
            return files[l];
        }

        // we maintain invariant GTID(l) < gtid < GTID(h) and we need files[i] such that GTID(i) <= gtid < GTID(i+1)
        while ( h - l > 1){

            int m = l + ( h - l) / 2; // l < m < h

            cmp = gtid.compareToIgnoreCase( getFirstGTID( files[m], connection ) );

            if (cmp == 0) {
                return files[m];
            } else if (cmp > 0){
                l = m; // maintain gtid > GTID(l)
            } else {
                h = m; // maintain gtid < GTID(h)
            }
        }

        // h = l + 1, GTID(l) < gtid < GTID(h)
        return files[l];
    }

    /**
     * Returns the first pseudo-GTID found in the given binlog file.
     *
     * @throws RuntimeException if the file contains no pseudo-GTID
     */
    private String getFirstGTID(String file, Connection connection) throws SQLException {

        LOGGER.info(String.format("Getting first GTID from %s...", file));

        final Holder<String> gtidHolder = new Holder<>();

        findEvent( resultSet -> {
            try {
                String query = resultSet.getString( "Info" );
                if ( queryInspector.isPseudoGTID( query ) ){
                    gtidHolder.setValue( queryInspector.extractPseudoGTID( query ) );
                    return true;
                }
                return false;
            } catch (SQLException | QueryInspectorException e) {
                // Predicate cannot throw checked exceptions; rethrow unchecked.
                throw new RuntimeException(e);
            }
        } , file, connection);

        String gtid = gtidHolder.getValue();

        if (gtid == null) throw new RuntimeException(String.format("Binlog file %s does not contain any GTID", file));

        LOGGER.info(String.format("First GTID in %s is %s", file, gtid));

        return gtid;
    }

    /** Lists the server's binlog file names via SHOW BINARY LOGS. */
    private String[] getBinaryLogs( Connection connection ) throws SQLException{

        try ( Statement statement = connection.createStatement();
              ResultSet result = statement.executeQuery("SHOW BINARY LOGS;") ){

            List<String> files = new ArrayList<>();

            while ( result.next() ){
                files.add( result.getString("Log_name") );
            }

            return files.toArray(new String[files.size()]);
        }
    }
}
|
package com.crawljax.plugins.crawloverview;
import static org.apache.commons.lang.StringEscapeUtils.escapeHtml;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Map.Entry;
import org.apache.commons.lang3.tuple.ImmutablePair;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
/**
* Parses a Java bean to a map containing the getter name as the key and the field value as the
* value. All instances of {@link Collection} are converted to html lists.
*/
/**
 * Parses a Java bean to a map containing the getter name as the key and the field value as the
 * value. All instances of {@link Collection} are converted to html lists.
 */
class BeanToReadableMap {

    public static interface Filter {
        Entry<String, String> filter(String key, String value);
    }

    // Splits camelCase / PascalCase / acronym boundaries into words.
    private static final String CAMEL_REGEX = String.format("%s|%s|%s",
            "(?<=[A-Z])(?=[A-Z][a-z])",
            "(?<=[^A-Z])(?=[A-Z])",
            "(?<=[A-Za-z])(?=[^A-Za-z])");

    private static final Filter[] EMPTY_FILTERS = new Filter[] {};

    /**
     * Converts all getter values of {@code o} into a readable sorted map.
     */
    public static ImmutableMap<String, String> toMap(Object o) {
        return toMap(o, EMPTY_FILTERS);
    }

    /**
     * Converts all getter values of {@code o} into a readable sorted map.
     * <p>
     * NOTE(review): the {@code filters} argument is accepted but never applied
     * to the entries — presumably unfinished; confirm intent before relying on it.
     *
     * @param o the bean to read
     * @param filters entry filters (currently unused)
     * @return sorted map of human-readable getter name to rendered value
     * @throws CrawlOverviewException if a getter cannot be invoked
     */
    public static ImmutableSortedMap<String, String> toMap(Object o, Filter... filters) {
        ImmutableSortedMap.Builder<String, String> builder = ImmutableSortedMap.naturalOrder();
        for (Method method : o.getClass().getMethods()) {
            if (isGetter(method)) {
                builder.put(addmethodToMap(o, method, filters));
            }
        }
        return builder.build();
    }

    /** Invokes one no-arg getter and renders its (name, value) entry. */
    private static Entry<String, String> addmethodToMap(Object o, Method method, Filter[] filters) {
        try {
            Object result = method.invoke(o);
            return ImmutablePair.of(asName(method), toString(result));
        } catch (Exception e) {
            throw new CrawlOverviewException("Could not parse bean " + o.toString()
                    + " because " + e.getMessage(), e);
        }
    }

    /** Renders a getter value: collections as HTML lists, everything else escaped. */
    private static String toString(Object result) {
        if (result == null) {
            // Robustness fix: a null getter value previously caused an NPE
            // (wrapped into a CrawlOverviewException); render it as empty instead.
            return "";
        }
        if (result instanceof Collection<?>) {
            return asHtmlList((Collection<?>) result);
        } else {
            return escapeHtml(result.toString());
        }
    }

    /** Turns "getFooBar" into "Foo Bar". */
    private static String asName(Method method) {
        String name = method.getName().substring(3);
        return splitCamelCase(name);
    }

    /** Renders a collection as an HTML &lt;ul&gt; list; empty collection → "". */
    private static String asHtmlList(Collection<?> result) {
        if (result.isEmpty()) {
            return "";
        }
        StringBuilder sb = new StringBuilder("<ul>");
        for (Object object : result) {
            sb.append("<li>").append(escapeHtml(object.toString())).append("</li>");
        }
        return sb.append("</ul>").toString();
    }

    /** A getter is a public no-arg method named get*, excluding getClass(). */
    private static boolean isGetter(Method method) {
        return method.getName().startsWith("get")
                && method.getParameterTypes().length == 0
                && !"getClass".equals(method.getName());
    }

    private static String splitCamelCase(String s) {
        return s.replaceAll(CAMEL_REGEX, " ");
    }

    // Utility class: not instantiable.
    private BeanToReadableMap() {
    }
}
|
package com.creatubbles.ctbmod.common.http;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.EnumMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.imageio.ImageIO;
import lombok.Getter;
import lombok.SneakyThrows;
import lombok.ToString;
import lombok.Value;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.texture.AbstractTexture;
import net.minecraft.client.renderer.texture.ITextureObject;
import net.minecraft.client.renderer.texture.TextureManager;
import net.minecraft.client.renderer.texture.TextureUtil;
import net.minecraft.client.resources.IResourceManager;
import net.minecraft.util.ResourceLocation;
import org.lwjgl.util.Dimension;
import com.creatubbles.api.core.Creation;
import com.creatubbles.api.core.Image;
import com.creatubbles.api.core.Image.ImageType;
import com.creatubbles.ctbmod.CTBMod;
import com.creatubbles.ctbmod.common.config.DataCache;
import com.creatubbles.ctbmod.common.util.JsonUtil;
import com.google.common.collect.Maps;
import com.google.common.collect.Queues;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
@ToString
/**
 * A lazily-downloaded creation image. Each {@link ImageType} maps to a
 * texture {@link ResourceLocation} and a {@link Size}; until downloaded, both
 * fall back to {@link #MISSING_TEXTURE} and a zero size. Downloads happen on a
 * small background thread pool; GL texture upload is rescheduled onto the
 * main client thread.
 */
public class DownloadableImage {

    /** Immutable size record: original width/height plus the power-of-two scaled dimension. */
    @Value
    private static class Size {

        private int width, height, scaled;

        private Dimension dimension;

        private Size() {
            this(0, 0, 0);
        }

        public Size(int width, int height, int scaled) {
            this.width = width;
            this.height = height;
            this.scaled = scaled;
            dimension = new Dimension(width, height);
        }

        public static Size create(BufferedImage actual, BufferedImage rescale) {
            // rescale is square, so its width doubles as the "scaled" dimension.
            return new Size(actual.getWidth(), actual.getHeight(), rescale.getWidth());
        }
    }

    /** Texture backed by the PoT-rescaled image; remembers the original size. */
    private static class RescaledTexture extends AbstractTexture {

        private final int[] textureData;

        @Getter
        private final Size size;

        public RescaledTexture(BufferedImage actual, BufferedImage rescale) {
            textureData = new int[rescale.getWidth() * rescale.getHeight()];
            rescale.getRGB(0, 0, rescale.getWidth(), rescale.getHeight(), getTextureData(), 0, rescale.getWidth());
            size = Size.create(actual, rescale);
        }

        // Must run on the main thread (requires GL context).
        void uploadTexture() {
            TextureUtil.allocateTexture(this.getGlTextureId(), size.getScaled(), size.getScaled());
            updateDynamicTexture();
        }

        @Override
        public void loadTexture(IResourceManager resourceManager) throws IOException {}

        public void updateDynamicTexture() {
            TextureUtil.uploadTexture(this.getGlTextureId(), getTextureData(), size.getScaled(), size.getScaled());
        }

        public int[] getTextureData() {
            return this.textureData;
        }
    }

    public static final ResourceLocation MISSING_TEXTURE = new ResourceLocation("missingno");

    // At most 3 concurrent downloads; idle threads die after 20s.
    private static Executor downloadExecutor = new ThreadPoolExecutor(0, 3, 20, TimeUnit.SECONDS, Queues.<Runnable> newLinkedBlockingQueue());

    static {
        Minecraft.getMinecraft().getTextureManager().loadTexture(MISSING_TEXTURE, TextureUtil.missingTexture);
    }

    @Getter
    private Creation owner;

    // transient: excluded from Gson serialization; rebuilt via initDefaults().
    private transient final EnumMap<ImageType, ResourceLocation> locations = Maps.newEnumMap(ImageType.class);
    private transient EnumMap<ImageType, Size> sizes = Maps.newEnumMap(ImageType.class);
    private transient Image parent;

    public DownloadableImage() {
        initDefaults();
    }

    public DownloadableImage(Image image, Creation owner) {
        this.parent = image;
        this.owner = owner;
        initDefaults();
    }

    // Every type starts as the missing texture with a zero size.
    private void initDefaults() {
        for (com.creatubbles.api.core.Image.ImageType type : com.creatubbles.api.core.Image.ImageType.values()) {
            locations.put(type, MISSING_TEXTURE);
            sizes.put(type, new Size());
        }
    }

    /**
     * Gets the bindable {@link ResourceLocation} for the given {@link ImageType type}.
     *
     * @param type
     *            The {@link ImageType} to get the resource for.
     * @return A {@link ResourceLocation}, which may be a dummy if this Image has not been downloaded, or is in the
     *         process of being downloaded.
     */
    public ResourceLocation getResource(ImageType type) {
        return locations.get(type);
    }

    private Size getSize(ImageType type) {
        return sizes.get(type);
    }

    /**
     * The dimensions for this image.
     *
     * @param type
     *            The {@link ImageType} to get the dimensions for.
     * @return An {@link Dimension} representing the size of this image. May be zero if the image is not downloaded.
     */
    public Dimension getDimensions(ImageType type) {
        return getSize(type).getDimension();
    }

    /**
     * The width of this image.
     *
     * @param type
     *            The {@link ImageType} to get the width for.
     * @return The width of this image. May be zero if the image is not downloaded.
     */
    public int getWidth(ImageType type) {
        return getSize(type).getWidth();
    }

    /**
     * The height of this image.
     *
     * @param type
     *            The {@link ImageType} to get the height for.
     * @return The height of this image. May be zero if the image is not downloaded.
     */
    public int getHeight(ImageType type) {
        return getSize(type).getHeight();
    }

    /**
     * To avoid issues with certain GPUs, the in-memory image is scaled up to the nearest power of two square dimension.
     * This method returns that value for use in rendering.
     *
     * @param type
     *            The {@link ImageType} to get the size for.
     * @return The scaled size of this image.
     */
    public int getScaledSize(ImageType type) {
        return getSize(type).getScaled();
    }

    /**
     * This method is not blocking, but note that {@link #getSize(ImageType)} will return a 0-size rectangle before the
     * image finishes downloading. Check for this with {@link #hasSize(ImageType)}.
     *
     * @param type
     *            The {@link ImageType} to download.
     * @see #updateSize(ImageType)
     */
    @SneakyThrows
    public void download(final ImageType type) {
        if (locations.get(type) == MISSING_TEXTURE) {
            TextureManager texturemanager = Minecraft.getMinecraft().getTextureManager();
            final String filepath = "creations/" + owner.user_id + "/" + type.name() + "/" + owner.id + ".jpg";
            final ResourceLocation res = new ResourceLocation(CTBMod.DOMAIN, filepath);
            ITextureObject texture = texturemanager.getTexture(res);
            if (texture == null) {
                downloadExecutor.execute(new Runnable() {

                    @Override
                    @SneakyThrows
                    public void run() {
                        String url = parent.links.get(type);
                        File cache = new File(DataCache.cacheFolder, filepath);
                        BufferedImage image = null;
                        // Prefer the on-disk cache; otherwise fetch and cache.
                        if (cache.exists()) {
                            image = ImageIO.read(cache);
                        } else {
                            image = ImageIO.read(new URL(url));
                            cache.getParentFile().mkdirs();
                            try {
                                // Cache the original, not the resize, this way we do not lose original size data
                                ImageIO.write(image, "png", cache);
                            } catch (IOException e) {
                                // This is not strictly necessary, so we'll just log an error and download the image
                                // again next time
                                CTBMod.logger.error("Could not save image {} to {}. Stacktrace: ", cache.getName(), cache.getAbsolutePath());
                                e.printStackTrace();
                            }
                        }
                        final BufferedImage original = image;

                        // Find the biggest dimension of the image
                        int maxDim = Math.max(image.getWidth(), image.getHeight());

                        // Find nearest PoT which can contain the downloaded/read image
                        int targetDim = 2;
                        while (targetDim < maxDim) {
                            targetDim *= 2;
                        }

                        // Create a blank image with PoT size
                        final BufferedImage resized = new BufferedImage(targetDim, targetDim, image.getType());

                        // Write the downloaded image into the top left of the blank image
                        Graphics2D graphics = resized.createGraphics();
                        try {
                            graphics.drawImage(image, 0, 0, null);
                        } finally {
                            graphics.dispose();
                        }

                        ImageIO.write(resized, "png", new File(cache.getParentFile(), cache.getName().replace(".", "-resized.")));

                        final RescaledTexture texture = new RescaledTexture(original, resized);

                        // Do this on the main thread with GL context
                        Minecraft.getMinecraft().addScheduledTask(new Runnable() {

                            @Override
                            public void run() {
                                texture.uploadTexture();
                                Minecraft.getMinecraft().getTextureManager().loadTexture(res, texture);

                                // Don't populate size and location data until after the texture is loaded
                                sizes.put(type, Size.create(original, resized));
                                locations.put(type, res);
                            }
                        });
                    }
                });
            } else if (texture instanceof RescaledTexture) {
                // Grab cached size data
                sizes.put(type, ((RescaledTexture) texture).getSize());
            }
            // NOTE(review): this unconditional put also runs on the async-download
            // branch, publishing `res` before the texture is uploaded — which
            // appears to contradict the deferred put in the scheduled task above.
            // Confirm whether binding an unloaded ResourceLocation is safe here.
            locations.put(type, res);
        }
    }

    /**
     * Checks if the size for the given type has been initialized
     *
     * @param type
     *            The {@link ImageType} to check for.
     * @return True if the size for this type has been initialized. False otherwise.
     */
    public boolean hasSize(ImageType type) {
        return sizes.get(type).getHeight() != 0;
    }

    // Registers EnumMap instance creators so Gson can rebuild the transient maps.
    public static GsonBuilder registerGsonAdapters(GsonBuilder builder) {
        builder.registerTypeAdapter(new TypeToken<EnumMap<ImageType, ResourceLocation>>() {}.getType(), new JsonUtil.EnumMapInstanceCreator<ImageType, ResourceLocation>(ImageType.class));
        builder.registerTypeAdapter(new TypeToken<EnumMap<ImageType, Size>>() {}.getType(), new JsonUtil.EnumMapInstanceCreator<ImageType, Size>(ImageType.class));
        return builder;
    }
}
|
package com.elmakers.mine.bukkit.action.builtin;
import com.elmakers.mine.bukkit.action.BaseSpellAction;
import com.elmakers.mine.bukkit.api.action.CastContext;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.magic.MageController;
import com.elmakers.mine.bukkit.api.spell.Spell;
import com.elmakers.mine.bukkit.api.spell.SpellResult;
import com.elmakers.mine.bukkit.spell.BaseSpell;
import com.elmakers.mine.bukkit.utility.CompatibilityUtils;
import org.bukkit.attribute.Attribute;
import org.bukkit.attribute.AttributeInstance;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Damageable;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import java.util.Arrays;
import java.util.Collection;
/**
 * Spell action that damages the targeted entity. Damage amount is chosen by
 * target kind (player / entity / elemental) or as a percentage of max health,
 * scaled by the caster's damage multiplier. Optionally overrides the target's
 * knockback resistance for the duration of the hit.
 */
public class DamageAction extends BaseSpellAction
{
    private double entityDamage;
    private double playerDamage;
    private double elementalDamage;
    private boolean magicDamage;
    private boolean magicEntityDamage;
    // Nullable: null means "not configured".
    private Double percentage;
    private Double knockbackResistance;

    @Override
    public void prepare(CastContext context, ConfigurationSection parameters)
    {
        super.prepare(context, parameters);
        // "damage" is the shared fallback for the per-target-kind values.
        double damage = parameters.getDouble("damage", 1);
        entityDamage = parameters.getDouble("entity_damage", damage);
        playerDamage = parameters.getDouble("player_damage", damage);
        elementalDamage = parameters.getDouble("elemental_damage", damage);
        if (parameters.contains("percentage")) {
            percentage = parameters.getDouble("percentage");
        } else {
            percentage = null;
        }
        magicDamage = parameters.getBoolean("magic_damage", true);
        magicEntityDamage = parameters.getBoolean("magic_entity_damage", true);
        if (parameters.contains("knockback_resistance")) {
            knockbackResistance = parameters.getDouble("knockback_resistance");
        } else {
            knockbackResistance = null;
        }
    }

    @Override
    public SpellResult perform(CastContext context)
    {
        Entity entity = context.getTargetEntity();
        // instanceof is false for null, so no separate null check is needed.
        if (!(entity instanceof Damageable) || entity.isDead())
        {
            return SpellResult.NO_TARGET;
        }

        double damage = 1;

        Damageable targetEntity = (Damageable)entity;
        LivingEntity livingTarget = (entity instanceof LivingEntity) ? (LivingEntity)entity : null;
        context.registerDamaged(targetEntity);
        Mage mage = context.getMage();
        MageController controller = context.getController();

        double previousKnockbackResistance = 0D;
        try {
            // Temporarily override knockback resistance; restored in finally.
            if (knockbackResistance != null && livingTarget != null) {
                AttributeInstance knockBackAttribute = livingTarget.getAttribute(Attribute.GENERIC_KNOCKBACK_RESISTANCE);
                previousKnockbackResistance = knockBackAttribute.getBaseValue();
                knockBackAttribute.setBaseValue(knockbackResistance);
            }

            if (controller.isElemental(entity)) {
                damage = elementalDamage;
                controller.damageElemental(entity, damage * mage.getDamageMultiplier(), 0, mage.getCommandSender());
            } else {
                // percentage takes precedence over the per-kind flat values.
                if (percentage != null) {
                    damage = percentage * targetEntity.getMaxHealth();
                } else if (targetEntity instanceof Player) {
                    damage = playerDamage;
                } else {
                    damage = entityDamage;
                }
                damage *= mage.getDamageMultiplier();
                if (magicDamage && (magicEntityDamage || targetEntity instanceof Player)) {
                    CompatibilityUtils.magicDamage(targetEntity, damage, mage.getEntity());
                } else {
                    CompatibilityUtils.damage(targetEntity, damage, mage.getEntity());
                }
            }
        } finally {
            if (knockbackResistance != null && livingTarget != null) {
                AttributeInstance knockBackAttribute = livingTarget.getAttribute(Attribute.GENERIC_KNOCKBACK_RESISTANCE);
                knockBackAttribute.setBaseValue(previousKnockbackResistance);
            }
        }

        return SpellResult.CAST;
    }

    @Override
    public boolean isUndoable()
    {
        return true;
    }

    @Override
    public void getParameterNames(Spell spell, Collection<String> parameters) {
        super.getParameterNames(spell, parameters);
        parameters.add("damage");
        parameters.add("player_damage");
        parameters.add("entity_damage");
        parameters.add("elemental_damage");
        parameters.add("magic_damage");
        parameters.add("percentage");
        // Consistency fix: these are read in prepare() but were missing here.
        parameters.add("magic_entity_damage");
        parameters.add("knockback_resistance");
    }

    @Override
    public void getParameterOptions(Spell spell, String parameterKey, Collection<String> examples) {
        if (parameterKey.equals("damage") || parameterKey.equals("player_damage")
            || parameterKey.equals("entity_damage") || parameterKey.equals("elemental_damage")) {
            examples.addAll(Arrays.asList((BaseSpell.EXAMPLE_SIZES)));
        } else if (parameterKey.equals("magic_damage")) {
            examples.addAll(Arrays.asList((BaseSpell.EXAMPLE_BOOLEANS)));
        } else if (parameterKey.equals("percentage")) {
            examples.addAll(Arrays.asList((BaseSpell.EXAMPLE_PERCENTAGES)));
        } else {
            super.getParameterOptions(spell, parameterKey, examples);
        }
    }

    @Override
    public boolean requiresTargetEntity()
    {
        return true;
    }
}
|
package com.geccocrawler.gecco.dynamic;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.geccocrawler.gecco.annotation.Ajax;
import com.geccocrawler.gecco.annotation.Attr;
import com.geccocrawler.gecco.annotation.FieldRenderName;
import com.geccocrawler.gecco.annotation.Href;
import com.geccocrawler.gecco.annotation.Html;
import com.geccocrawler.gecco.annotation.HtmlField;
import com.geccocrawler.gecco.annotation.Image;
import com.geccocrawler.gecco.annotation.JSONPath;
import com.geccocrawler.gecco.annotation.JSVar;
import com.geccocrawler.gecco.annotation.Request;
import com.geccocrawler.gecco.annotation.RequestParameter;
import com.geccocrawler.gecco.annotation.Text;

import javassist.CtClass;
import javassist.CtField;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.ConstPool;
import javassist.bytecode.FieldInfo;
import javassist.bytecode.annotation.Annotation;
import javassist.bytecode.annotation.ArrayMemberValue;
import javassist.bytecode.annotation.BooleanMemberValue;
import javassist.bytecode.annotation.MemberValue;
import javassist.bytecode.annotation.StringMemberValue;
/**
*
*
* @author huchengyi
*
*/
public class JavassistDynamicField implements DynamicField {

    private static Log log = LogFactory.getLog(JavassistDynamicField.class);

    /** Owning bean builder; returned by {@link #build()} for fluent chaining. */
    private DynamicBean dynamicBean;
    /** The field whose bytecode annotations are being assembled. */
    private CtField cfield;
    /** Constant pool of the enclosing class, required by every javassist member value. */
    private ConstPool cpool;
    /** Accumulates the runtime-visible annotations added by the fluent calls below. */
    private AnnotationsAttribute attr;

    /**
     * Looks up {@code fieldName} on {@code clazz} and prepares an attribute that
     * will collect runtime-visible annotations for it.
     *
     * <p>NOTE(review): if the field is not found, the error is only logged and
     * {@code cfield}/{@code attr} remain {@code null}, so a later call to
     * {@link #build()} or any annotation method will throw a
     * NullPointerException. Preserved as-is to keep the original contract.</p>
     */
    public JavassistDynamicField(DynamicBean dynamicBean, CtClass clazz, ConstPool cpool, String fieldName) {
        try {
            this.dynamicBean = dynamicBean;
            this.cpool = cpool;
            this.cfield = clazz.getField(fieldName);
            attr = new AnnotationsAttribute(cpool, AnnotationsAttribute.visibleTag);
        } catch (NotFoundException e) {
            // Pass the exception as the cause so the stack trace is not lost.
            log.error(fieldName + " not found", e);
        }
    }

    /** Attaches all collected annotations to the field and returns the owning bean. */
    public DynamicBean build() {
        FieldInfo finfo = cfield.getFieldInfo();
        finfo.addAttribute(attr);
        return dynamicBean;
    }

    /** Adds {@code annot} to this field's annotation attribute and chains. */
    private DynamicField register(Annotation annot) {
        attr.addAnnotation(annot);
        return this;
    }

    /** Builds a javassist array member value holding the given strings. */
    private ArrayMemberValue stringArray(String... values) {
        ArrayMemberValue arrayMemberValue = new ArrayMemberValue(cpool);
        MemberValue[] memberValues = new StringMemberValue[values.length];
        for (int i = 0; i < values.length; i++) {
            memberValues[i] = new StringMemberValue(values[i], cpool);
        }
        arrayMemberValue.setValue(memberValues);
        return arrayMemberValue;
    }

    /** @deprecated use {@link #csspath(String)} instead. */
    @Deprecated
    @Override
    public DynamicField htmlField(String cssPath) {
        return csspath(cssPath);
    }

    /** Adds {@code @HtmlField(cssPath=...)} to the field. */
    @Override
    public DynamicField csspath(String cssPath) {
        Annotation annot = new Annotation(HtmlField.class.getName(), cpool);
        annot.addMemberValue("cssPath", new StringMemberValue(cssPath, cpool));
        return register(annot);
    }

    /** Adds {@code @Text(own=...)} to the field. */
    @Override
    public DynamicField text(boolean own) {
        Annotation annot = new Annotation(Text.class.getName(), cpool);
        annot.addMemberValue("own", new BooleanMemberValue(own, cpool));
        return register(annot);
    }

    /** Adds {@code @Text(own=true)} to the field. */
    @Override
    public DynamicField text() {
        return text(true);
    }

    /** Adds {@code @Html(outer=...)} to the field. */
    @Override
    public DynamicField html(boolean outer) {
        Annotation annot = new Annotation(Html.class.getName(), cpool);
        annot.addMemberValue("outer", new BooleanMemberValue(outer, cpool));
        return register(annot);
    }

    /** Adds {@code @Href(click=..., value=...)} to the field. */
    @Override
    public DynamicField href(boolean click, String... value) {
        Annotation annot = new Annotation(Href.class.getName(), cpool);
        annot.addMemberValue("click", new BooleanMemberValue(click, cpool));
        annot.addMemberValue("value", stringArray(value));
        return register(annot);
    }

    /** Adds {@code @Href(click=false, value=...)} to the field. */
    @Override
    public DynamicField href(String... value) {
        return href(false, value);
    }

    /** Adds {@code @Image(download=..., value=...)} to the field. */
    @Override
    public DynamicField image(String download, String... value) {
        Annotation annot = new Annotation(Image.class.getName(), cpool);
        annot.addMemberValue("download", new StringMemberValue(download, cpool));
        annot.addMemberValue("value", stringArray(value));
        return register(annot);
    }

    /** Adds {@code @Image} with an empty download location. */
    @Override
    public DynamicField image() {
        return image("");
    }

    /** Adds {@code @Attr(value=...)} to the field. */
    @Override
    public DynamicField attr(String value) {
        Annotation annot = new Annotation(Attr.class.getName(), cpool);
        annot.addMemberValue("value", new StringMemberValue(value, cpool));
        return register(annot);
    }

    /** Adds {@code @Ajax(url=...)} to the field. */
    @Override
    public DynamicField ajax(String url) {
        Annotation annot = new Annotation(Ajax.class.getName(), cpool);
        annot.addMemberValue("url", new StringMemberValue(url, cpool));
        return register(annot);
    }

    /** Adds a marker {@code @Request} annotation to the field. */
    @Override
    public DynamicField request() {
        return register(new Annotation(Request.class.getName(), cpool));
    }

    /** Adds {@code @RequestParameter(value=...)} to the field. */
    @Override
    public DynamicField requestParameter(String param) {
        Annotation annot = new Annotation(RequestParameter.class.getName(), cpool);
        annot.addMemberValue("value", new StringMemberValue(param, cpool));
        return register(annot);
    }

    /** Adds {@code @RequestParameter} with an empty value. */
    @Override
    public DynamicField requestParameter() {
        return requestParameter("");
    }

    /** Adds {@code @JSVar(var=..., jsonpath=...)} to the field. */
    @Override
    public DynamicField jsvar(String var, String jsonpath) {
        Annotation annot = new Annotation(JSVar.class.getName(), cpool);
        annot.addMemberValue("var", new StringMemberValue(var, cpool));
        annot.addMemberValue("jsonpath", new StringMemberValue(jsonpath, cpool));
        return register(annot);
    }

    /** Adds {@code @JSVar(var=...)} with an empty jsonpath. */
    @Override
    public DynamicField jsvar(String var) {
        return jsvar(var, "");
    }

    /** Adds {@code @JSONPath(value=...)} to the field. */
    @Override
    public DynamicField jsonpath(String value) {
        Annotation annot = new Annotation(JSONPath.class.getName(), cpool);
        annot.addMemberValue("value", new StringMemberValue(value, cpool));
        return register(annot);
    }

    /** Adds {@code @FieldRenderName(value=...)} to the field. */
    @Override
    public DynamicField renderName(String value) {
        Annotation renderName = new Annotation(FieldRenderName.class.getName(), cpool);
        renderName.addMemberValue("value", new StringMemberValue(value, cpool));
        return register(renderName);
    }

    /** Adds an arbitrary prebuilt annotation to the field. */
    @Override
    public DynamicField customAnnotation(Annotation annotation) {
        return register(annotation);
    }

    /** @return the constant pool this builder writes member values into. */
    @Override
    public ConstPool getConstPool() {
        return this.cpool;
    }
}
|
package com.gh.mygreen.xlsmapper.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* {@link XlsHorizontalRecords}{@link XlsVerticalRecords}
*
*
* <h3 class="description"></h3>
* <p>Collection(List/Set)</p>
*
* <ul>
* <li></li>
* <li>JavaBean</li>
* <li>{@link XlsHorizontalRecords#terminateLabel()}{@link XlsHorizontalRecords#ignoreEmptyRecord()}
* </li>
* <li>{@link XlsIsEmpty}
* 00</li>
* </ul>
*
* <pre class="highlight"><code class="java">
* //
* {@literal @XlsSheet(name="")}
* public class SampleSheet {
*
* {@literal @XlsHorizontalRecords(tableLabel="")}
* private {@literal List<CategoryRecord>} categories;
* }
*
* //
* public class CategoryRecord {
*
* {@literal @XlsColumn(columnName="")}
* private String name;
*
* {@literal @XlsColumn(columnName="")}
* private String description;
*
* //
* {@literal @XlsNestedRecords}
* private {@literal List<FunctionRecord>} functions;
*
* }
*
* //
* public class FunctionRecord {
*
* {@literal @XlsColumn(columnName="")}
* private String name;
*
* {@literal @XlsColumn(columnName="")}
* private String description;
*
* //
* {@literal @XlsNestedRecords}
* private {@literal List<DetailRecord>} details;
*
* }
*
* //
* public class DetailRecord {
*
* {@literal @XlsColumn(columnName="")}
* private String name;
*
* {@literal @XlsColumn(columnName="")}
* private String value;
*
* }
* </code></pre>
*
* <div class="picture">
* <img src="doc-files/NestedRecords_oneToMany.png">
* <p></p>
* </div>
*
*
* <h3 class="description"></h3>
* <p></p>
* <p></p>
*
* <pre class="highlight"><code class="java">
* //
* {@literal @XlsSheet(name="")}
* public class SampleSheet {
*
* {@literal @XlsHorizontalRecords(tableLabel="", bottom=2)}
* private {@literal List<UserRecord>} users;
* }
*
* //
* public class UserRecord {
*
* {@literal @XlsColumn(columnName="No.")}
* private int no;
*
* {@literal @XlsColumn(columnName="", merged=true)}
* private String className;
*
* {@literal @XlsColumn(columnName="")}
* private String name;
*
* //
* {@literal @XlsNestedRecords}
* private ResultRecord result;
*
* }
*
* //
* public class UserRecord {
*
* {@literal @XlsColumn(columnName="")}
* private int kokugo;
*
* {@literal @XlsColumn(columnName="")}
* private int sansu;
*
* {@literal @XlsColumn(columnName="")}
* private int sum;
*
* }
* </code></pre>
*
*
* <div class="picture">
* <img src="doc-files/NestedRecords_oneToOne.png">
* <p></p>
 * </div>
 *
* @since 1.4
* @author T.TSUCHIE
*
*/
@Target({ElementType.METHOD, ElementType.FIELD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface XlsNestedRecords {
    /**
     * The record class that nested records are mapped to.
     *
     * <p>NOTE(review): the original (non-English) documentation was stripped by
     * extraction; it mentioned "Generics", so when this is left at the default
     * {@code Object.class} the record class is presumably inferred from the
     * generics of the annotated Collection/List/Set field — verify against the
     * mapper implementation.</p>
     */
    Class<?> recordClass() default Object.class;
}
|
package com.github.lunatrius.core.client.gui;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.GuiButton;
import net.minecraft.client.gui.GuiTextField;
public class GuiNumericField extends GuiButton {
    private static final int DEFAULT_VALUE = 0;
    private static final int BUTTON_WIDTH = 12;

    private final GuiTextField guiTextField;
    private final GuiButton guiButtonDec;
    private final GuiButton guiButtonInc;

    /** Last text known to parse to an in-range value; used to revert bad input. */
    private String previous = String.valueOf(DEFAULT_VALUE);
    private int minimum = Integer.MIN_VALUE;
    private int maximum = Integer.MAX_VALUE;
    private boolean wasFocused = false;

    public GuiNumericField(FontRenderer fontRenderer, int id, int x, int y) {
        this(fontRenderer, id, x, y, 100, 20);
    }

    public GuiNumericField(FontRenderer fontRenderer, int id, int x, int y, int width) {
        this(fontRenderer, id, x, y, width, 20);
    }

    /**
     * Builds a numeric input composed of a text box plus "-" and "+" buttons
     * laid out inside the given width/height, initialized to {@code DEFAULT_VALUE}.
     */
    public GuiNumericField(FontRenderer fontRenderer, int id, int x, int y, int width, int height) {
        super(id, 0, 0, 0, 0, "");
        this.guiTextField = new GuiTextField(fontRenderer, x, y + 1, width - BUTTON_WIDTH * 2 - 1, height - 2);
        this.guiButtonDec = new GuiButton(0, x + width - BUTTON_WIDTH * 2, y, BUTTON_WIDTH, height, "-");
        this.guiButtonInc = new GuiButton(1, x + width - BUTTON_WIDTH * 1, y, BUTTON_WIDTH, height, "+");
        setValue(DEFAULT_VALUE);
    }

    /**
     * Reports a press when the text box just lost focus (so the owner can react
     * to the edit) or when either +/- button is hit.
     */
    @Override
    public boolean mousePressed(Minecraft minecraft, int x, int y) {
        if (this.wasFocused && !this.guiTextField.isFocused()) {
            this.wasFocused = false;
            return true;
        }
        this.wasFocused = this.guiTextField.isFocused();
        return this.guiButtonDec.mousePressed(minecraft, x, y) || this.guiButtonInc.mousePressed(minecraft, x, y);
    }

    @Override
    public void drawButton(Minecraft minecraft, int x, int y) {
        if (this.visible) {
            this.guiTextField.drawTextBox();
            this.guiButtonInc.drawButton(minecraft, x, y);
            this.guiButtonDec.drawButton(minecraft, x, y);
        }
    }

    /** Forwards the click to the text box and applies +/- button increments. */
    public void mouseClicked(int x, int y, int action) {
        Minecraft minecraft = Minecraft.getMinecraft();
        this.guiTextField.mouseClicked(x, y, action);
        if (this.guiButtonInc.mousePressed(minecraft, x, y)) {
            setValue(getValue() + 1);
        }
        if (this.guiButtonDec.mousePressed(minecraft, x, y)) {
            setValue(getValue() - 1);
        }
    }

    /**
     * Handles a key stroke: lets the text box process it, then validates the
     * result. Valid numbers are clamped to [minimum, maximum]; unparsable input
     * is reverted to the last good text.
     *
     * @return true if the text now holds a valid (possibly clamped) number
     */
    public boolean keyTyped(char character, int code) {
        if (!this.guiTextField.isFocused()) {
            return false;
        }
        final int cursorPositionOld = this.guiTextField.getCursorPosition();
        this.guiTextField.textboxKeyTyped(character, code);
        String text = this.guiTextField.getText();
        final int cursorPositionNew = this.guiTextField.getCursorPosition();
        if (text.length() == 0) {
            text = String.valueOf(DEFAULT_VALUE);
        }
        try {
            // Parse as long so values just outside int range clamp instead of throwing.
            long value = Long.parseLong(text);
            if (value > this.maximum) {
                value = this.maximum;
            } else if (value < this.minimum) {
                value = this.minimum;
            }
            text = String.valueOf(value);
            // Write the canonical text back whenever it differs from what is
            // actually displayed. The old comparison against 'previous' missed
            // the case where the box was cleared while 'previous' already held
            // the canonical text, leaving the displayed text empty.
            if (!text.equals(this.guiTextField.getText())) {
                this.guiTextField.setText(text);
                this.guiTextField.setCursorPosition(cursorPositionNew);
            }
            this.previous = text;
            return true;
        } catch (NumberFormatException nfe) {
            this.guiTextField.setText(this.previous);
            this.guiTextField.setCursorPosition(cursorPositionOld);
        }
        return false;
    }

    public void updateCursorCounter() {
        this.guiTextField.updateCursorCounter();
    }

    // Note: changing the bounds does not re-clamp the currently displayed value;
    // it only affects subsequent edits and setValue() calls (original behavior).
    public void setMinimum(int minimum) {
        this.minimum = minimum;
    }

    public int getMinimum() {
        return this.minimum;
    }

    public void setMaximum(int maximum) {
        this.maximum = maximum;
    }

    public int getMaximum() {
        return this.maximum;
    }

    /** Sets the field to {@code value} clamped to [minimum, maximum]. */
    public void setValue(int value) {
        final String text = String.valueOf(clamp(value));
        // Keep 'previous' in sync so a later keyTyped() revert restores this value.
        this.previous = text;
        this.guiTextField.setText(text);
    }

    /**
     * @return the current value; an empty text box (possible mid-edit) yields
     *         the clamped default instead of throwing NumberFormatException
     */
    public int getValue() {
        final String text = this.guiTextField.getText();
        if (text.length() == 0) {
            return clamp(DEFAULT_VALUE);
        }
        return Integer.parseInt(text);
    }

    /** Clamps {@code value} to the configured [minimum, maximum] range. */
    private int clamp(int value) {
        if (value > this.maximum) {
            return this.maximum;
        }
        if (value < this.minimum) {
            return this.minimum;
        }
        return value;
    }
}
|
package com.googlecode.gmail4j.javamail;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.mail.Address;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Message.RecipientType;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import com.googlecode.gmail4j.EmailAddress;
import com.googlecode.gmail4j.GmailException;
import com.googlecode.gmail4j.GmailMessage;
public class JavaMailGmailMessage extends GmailMessage {

    /**
     * Original JavaMail {@link Message}
     */
    final Message source;

    /**
     * Cache for {@link #toString()}
     */
    private StringBuilder toString;

    /**
     * Sender's email address
     */
    private EmailAddress from;

    /**
     * Constructor with source {@link Message}
     *
     * @param source JavaMail message with data
     */
    public JavaMailGmailMessage(final Message source) {
        this.source = source;
    }

    /**
     * Constructor that creates a new empty JavaMail {@link MimeMessage}
     */
    public JavaMailGmailMessage() {
        this.source = new MimeMessage((Session) null);
    }

    /**
     * Gets the {@link #source} {@link Message}
     *
     * @return source message
     */
    public Message getMessage() {
        return source;
    }

    /**
     * Adds a To recipient, using the display name when one is present.
     *
     * @throws GmailException if the underlying JavaMail call fails
     */
    @Override
    public void addTo(final EmailAddress to) {
        try {
            if (to.hasName()) {
                source.addRecipient(RecipientType.TO,
                        new InternetAddress(to.getEmail(), to.getName()));
            } else {
                source.addRecipient(RecipientType.TO,
                        new InternetAddress(to.getEmail()));
            }
        } catch (final Exception e) {
            throw new GmailException("Failed adding To recipient", e);
        }
    }

    @Override
    public List<EmailAddress> getTo() {
        try {
            return getAddresses(RecipientType.TO);
        } catch (final Exception e) {
            throw new GmailException("Failed getting List of To recipients", e);
        }
    }

    @Override
    public List<EmailAddress> getCc() {
        try {
            return getAddresses(RecipientType.CC);
        } catch (final Exception e) {
            throw new GmailException("Failed getting List of Cc recipients", e);
        }
    }

    /**
     * Gets a {@link List} of {@link EmailAddress} by {@link RecipientType}.
     *
     * @param type Recipient type
     * @return List of Addresses, empty when the message has none of that type
     * @throws MessagingException in case something is wrong
     */
    private List<EmailAddress> getAddresses(final RecipientType type)
            throws MessagingException {
        final List<EmailAddress> addresses = new ArrayList<EmailAddress>();
        final Address[] recipients = source.getRecipients(type);
        if (recipients == null) {
            // JavaMail returns null (not an empty array) when the message has
            // no recipients of the given type; iterating it would throw NPE.
            return addresses;
        }
        for (final Address addr : recipients) {
            final InternetAddress temp = (InternetAddress) addr;
            addresses.add(new EmailAddress(temp.getPersonal(), temp.getAddress()));
        }
        return addresses;
    }

    /**
     * Sets the sender address, using the display name when one is present.
     *
     * @throws GmailException if the underlying JavaMail call fails
     */
    @Override
    public void setFrom(final EmailAddress from) {
        try {
            if (from.hasName()) {
                source.setFrom(new InternetAddress(
                        from.getEmail(), from.getName()));
            } else {
                source.setFrom(new InternetAddress(from.getEmail()));
            }
        } catch (final Exception e) {
            throw new GmailException("Failed setting from address", e);
        }
    }

    /**
     * Gets the first sender address, lazily cached in {@link #from}.
     */
    @Override
    public EmailAddress getFrom() {
        if (from == null) {
            try {
                final InternetAddress f = (InternetAddress) source.getFrom()[0];
                from = new EmailAddress(f.getPersonal(), f.getAddress());
            } catch (final Exception e) {
                throw new GmailException("Failed getting from address", e);
            }
        }
        return from;
    }

    @Override
    public Date getSendDate() {
        try {
            return source.getSentDate();
        } catch (final Exception e) {
            throw new GmailException("Failed getting send date", e);
        }
    }

    @Override
    public void setSubject(final String subject) {
        try {
            source.setSubject(subject);
        } catch (final Exception e) {
            throw new GmailException("Failed setting subject", e);
        }
    }

    @Override
    public String getSubject() {
        try {
            return source.getSubject();
        } catch (final Exception e) {
            throw new GmailException("Failed getting message subject", e);
        }
    }

    @Override
    public void setContentText(final String contentText) {
        try {
            source.setText(contentText);
        } catch (final Exception e) {
            // Fixed typo in the original message ("settting").
            throw new GmailException("Failed setting content text", e);
        }
    }

    @Override
    public String getPreview() {
        try {
            return source.getContent().toString();
        } catch (final Exception e) {
            throw new GmailException("Failed getting message preview", e);
        }
    }

    /**
     * Builds and caches a one-line summary of from/sendDate/subject/preview.
     */
    @Override
    public String toString() {
        if (toString != null) {
            return toString.toString();
        }
        toString = new StringBuilder();
        toString.append("MailMessage:{from:").append(getFrom())
                .append(";sendDate:").append(getSendDate())
                .append(";subject:").append(getSubject())
                .append(";preview:").append(getPreview()).append(";}");
        return toString.toString();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.